diff --git a/src/datafactory/HISTORY.rst b/src/datafactory/HISTORY.rst index f4e5240e156..01a0f93ca2b 100644 --- a/src/datafactory/HISTORY.rst +++ b/src/datafactory/HISTORY.rst @@ -3,6 +3,11 @@ Release History =============== +0.5.0 ++++++ +* az datafactory managed-virtual-network: Support create/update/list/show managed virtual network. +* az datafactory managed-private-endpoint: Support create/update/list/show/delete managed private endpoint. + 0.4.0 +++++ * GA the whole module diff --git a/src/datafactory/azext_datafactory/__init__.py b/src/datafactory/azext_datafactory/__init__.py index 4de09e21613..68dc1a4c888 100644 --- a/src/datafactory/azext_datafactory/__init__.py +++ b/src/datafactory/azext_datafactory/__init__.py @@ -7,13 +7,10 @@ # Changes may cause incorrect behavior and will be lost if the code is # regenerated. # -------------------------------------------------------------------------- +# pylint: disable=unused-import +import azext_datafactory._help from azure.cli.core import AzCommandsLoader -from azext_datafactory.generated._help import helps # pylint: disable=unused-import -try: - from azext_datafactory.manual._help import helps # pylint: disable=reimported -except ImportError: - pass class DataFactoryManagementClientCommandsLoader(AzCommandsLoader): @@ -33,8 +30,11 @@ def load_command_table(self, args): try: from azext_datafactory.manual.commands import load_command_table as load_command_table_manual load_command_table_manual(self, args) - except ImportError: - pass + except ImportError as e: + if e.name.endswith('manual.commands'): + pass + else: + raise e return self.command_table def load_arguments(self, command): @@ -43,8 +43,11 @@ def load_arguments(self, command): try: from azext_datafactory.manual._params import load_arguments as load_arguments_manual load_arguments_manual(self, command) - except ImportError: - pass + except ImportError as e: + if e.name.endswith('manual._params'): + pass + else: + raise e COMMAND_LOADER_CLS = DataFactoryManagementClientCommandsLoader diff --git a/src/datafactory/azext_datafactory/_help.py b/src/datafactory/azext_datafactory/_help.py new file mode 100644 index 00000000000..9b93f87a6e9 --- /dev/null +++ b/src/datafactory/azext_datafactory/_help.py @@ -0,0 +1,20 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- +# pylint: disable=wildcard-import +# pylint: disable=unused-wildcard-import +# pylint: disable=unused-import +from .generated._help import helps # pylint: disable=reimported +try: + from .manual._help import helps # pylint: disable=reimported +except ImportError as e: + if e.name.endswith('manual._help'): + pass + else: + raise e diff --git a/src/datafactory/azext_datafactory/action.py b/src/datafactory/azext_datafactory/action.py index d95d53bf711..9b3d0a8a78c 100644 --- a/src/datafactory/azext_datafactory/action.py +++ b/src/datafactory/azext_datafactory/action.py @@ -13,5 +13,8 @@ from .generated.action import * # noqa: F403 try: from .manual.action import * # noqa: F403 -except ImportError: - pass +except ImportError as e: + if e.name.endswith('manual.action'): + pass + else: + raise e diff --git a/src/datafactory/azext_datafactory/custom.py b/src/datafactory/azext_datafactory/custom.py index dbe9d5f9742..885447229d6 100644 --- a/src/datafactory/azext_datafactory/custom.py +++ b/src/datafactory/azext_datafactory/custom.py @@ -13,5 +13,8 @@ from .generated.custom import * # noqa: F403 try: from .manual.custom import * # noqa: F403 -except ImportError: - pass +except ImportError as e: + if e.name.endswith('manual.custom'): + pass + else: + raise e diff --git a/src/datafactory/azext_datafactory/generated/_client_factory.py b/src/datafactory/azext_datafactory/generated/_client_factory.py index 7db87b484da..7f3f4f6fc12 100644 --- a/src/datafactory/azext_datafactory/generated/_client_factory.py +++ b/src/datafactory/azext_datafactory/generated/_client_factory.py @@ -54,3 +54,11 @@ def cf_trigger(cli_ctx, *_): def cf_trigger_run(cli_ctx, *_): return cf_datafactory_cl(cli_ctx).trigger_runs + + +def cf_managed_virtual_network(cli_ctx, *_): + return cf_datafactory_cl(cli_ctx).managed_virtual_networks + + +def cf_managed_private_endpoint(cli_ctx, *_): + return cf_datafactory_cl(cli_ctx).managed_private_endpoints diff --git a/src/datafactory/azext_datafactory/generated/_help.py b/src/datafactory/azext_datafactory/generated/_help.py index fd2ab1dcd0e..b48ae278922 100644 --- a/src/datafactory/azext_datafactory/generated/_help.py +++ b/src/datafactory/azext_datafactory/generated/_help.py @@ -12,10 +12,10 @@ from knack.help_files import helps -helps['datafactory'] = """ +helps['datafactory'] = ''' type: group - short-summary: Manage factory with datafactory -""" + short-summary: Manage Data Factory +''' helps['datafactory list'] = """ type: command @@ -447,11 +447,6 @@ helps['datafactory linked-service update'] = """ type: command short-summary: "Update a linked service." - examples: - - name: LinkedServices_Update - text: |- - az datafactory linked-service update --factory-name "exampleFactoryName" --description "Example \ -description" --name "exampleLinkedService" --resource-group "exampleResourceGroup" """ helps['datafactory linked-service delete'] = """ @@ -512,13 +507,6 @@ Usage: --folder name=XX name: The name of the folder that this Dataset is in. 
- examples: - - name: Datasets_Update - text: |- - az datafactory dataset update --description "Example description" --linked-service-name \ -"{\\"type\\":\\"LinkedServiceReference\\",\\"referenceName\\":\\"exampleLinkedService\\"}" --parameters \ -"{\\"MyFileName\\":{\\"type\\":\\"String\\"},\\"MyFolderPath\\":{\\"type\\":\\"String\\"}}" --name "exampleDataset" \ ---factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" """ helps['datafactory dataset delete'] = """ @@ -756,11 +744,6 @@ helps['datafactory trigger update'] = """ type: command short-summary: "Update a trigger." - examples: - - name: Triggers_Update - text: |- - az datafactory trigger update --factory-name "exampleFactoryName" --resource-group \ -"exampleResourceGroup" --description "Example description" --name "exampleTrigger" """ helps['datafactory trigger delete'] = """ @@ -902,3 +885,97 @@ az datafactory trigger-run rerun --factory-name "exampleFactoryName" --resource-group \ "exampleResourceGroup" --run-id "2f7fdb90-5df1-4b8e-ac2f-064cfa58202b" --trigger-name "exampleTrigger" """ + +helps['datafactory managed-virtual-network'] = """ + type: group + short-summary: Manage managed virtual network with datafactory +""" + +helps['datafactory managed-virtual-network list'] = """ + type: command + short-summary: "Lists managed Virtual Networks." + examples: + - name: ManagedVirtualNetworks_ListByFactory + text: |- + az datafactory managed-virtual-network list --factory-name "exampleFactoryName" --resource-group \ +"exampleResourceGroup" +""" + +helps['datafactory managed-virtual-network show'] = """ + type: command + short-summary: "Gets a managed Virtual Network." + examples: + - name: ManagedVirtualNetworks_Get + text: |- + az datafactory managed-virtual-network show --factory-name "exampleFactoryName" --name \ +"exampleManagedVirtualNetworkName" --resource-group "exampleResourceGroup" +""" + +helps['datafactory managed-virtual-network create'] = """ + type: command + short-summary: "Create a managed Virtual Network." + examples: + - name: ManagedVirtualNetworks_Create + text: |- + az datafactory managed-virtual-network create --factory-name "exampleFactoryName" --name \ +"exampleManagedVirtualNetworkName" --resource-group "exampleResourceGroup" +""" + +helps['datafactory managed-virtual-network update'] = """ + type: command + short-summary: "Update a managed Virtual Network." +""" + +helps['datafactory managed-private-endpoint'] = """ + type: group + short-summary: Manage managed private endpoint with datafactory +""" + +helps['datafactory managed-private-endpoint list'] = """ + type: command + short-summary: "Lists managed private endpoints." + examples: + - name: ManagedPrivateEndpoints_ListByFactory + text: |- + az datafactory managed-private-endpoint list --factory-name "exampleFactoryName" \ +--managed-virtual-network-name "exampleManagedVirtualNetworkName" --resource-group "exampleResourceGroup" +""" + +helps['datafactory managed-private-endpoint show'] = """ + type: command + short-summary: "Gets a managed private endpoint." + examples: + - name: ManagedPrivateEndpoints_Get + text: |- + az datafactory managed-private-endpoint show --factory-name "exampleFactoryName" --name \ +"exampleManagedPrivateEndpointName" --managed-virtual-network-name "exampleManagedVirtualNetworkName" --resource-group \ +"exampleResourceGroup" +""" + +helps['datafactory managed-private-endpoint create'] = """ + type: command + short-summary: "Create a managed private endpoint." 
+ examples: + - name: ManagedPrivateEndpoints_Create + text: |- + az datafactory managed-private-endpoint create --factory-name "exampleFactoryName" --group-id "blob" \ +--private-link-resource-id "/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/prov\ +iders/Microsoft.Storage/storageAccounts/exampleBlobStorage" --name "exampleManagedPrivateEndpointName" \ +--managed-virtual-network-name "exampleManagedVirtualNetworkName" --resource-group "exampleResourceGroup" +""" + +helps['datafactory managed-private-endpoint update'] = """ + type: command + short-summary: "Update a managed private endpoint." +""" + +helps['datafactory managed-private-endpoint delete'] = """ + type: command + short-summary: "Deletes a managed private endpoint." + examples: + - name: ManagedPrivateEndpoints_Delete + text: |- + az datafactory managed-private-endpoint delete --factory-name "exampleFactoryName" --name \ +"exampleManagedPrivateEndpointName" --managed-virtual-network-name "exampleManagedVirtualNetworkName" --resource-group \ +"exampleResourceGroup" +""" diff --git a/src/datafactory/azext_datafactory/generated/_params.py b/src/datafactory/azext_datafactory/generated/_params.py index 2162b81c231..76633d40136 100644 --- a/src/datafactory/azext_datafactory/generated/_params.py +++ b/src/datafactory/azext_datafactory/generated/_params.py @@ -56,7 +56,7 @@ def load_arguments(self, _): c.argument('factory_git_hub_configuration', action=AddFactoryGitHubConfiguration, nargs='+', help='Factory\'s ' 'GitHub repo information.', arg_group='RepoConfiguration') c.argument('global_parameters', type=validate_file_or_dict, help='List of parameters for factory. Expected ' - 'value: json-string/@json-file.') + 'value: json-string/json-file/@json-file.') with self.argument_context('datafactory update') as c: c.argument('resource_group_name', resource_group_name_type) @@ -134,9 +134,10 @@ def load_arguments(self, _): 'update, for which it should match existing entity or can be * for unconditional update.') c.argument('description', type=str, help='Integration runtime description.') c.argument('compute_properties', type=validate_file_or_dict, help='The compute resource for managed ' - 'integration runtime. Expected value: json-string/@json-file.', arg_group='Type Properties') + 'integration runtime. Expected value: json-string/json-file/@json-file.', arg_group='Type ' + 'Properties') c.argument('ssis_properties', type=validate_file_or_dict, help='SSIS properties for managed integration ' - 'runtime. Expected value: json-string/@json-file.', arg_group='Type Properties') + 'runtime. Expected value: json-string/json-file/@json-file.', arg_group='Type Properties') with self.argument_context('datafactory integration-runtime self-hosted create') as c: c.argument('resource_group_name', resource_group_name_type) @@ -147,7 +148,7 @@ def load_arguments(self, _): 'update, for which it should match existing entity or can be * for unconditional update.') c.argument('description', type=str, help='Integration runtime description.') c.argument('linked_info', type=validate_file_or_dict, help='The base definition of a linked integration ' - 'runtime. Expected value: json-string/@json-file.', arg_group='Type Properties') + 'runtime. 
Expected value: json-string/json-file/@json-file.', arg_group='Type Properties') with self.argument_context('datafactory integration-runtime update') as c: c.argument('resource_group_name', resource_group_name_type) @@ -285,7 +286,7 @@ def load_arguments(self, _): c.argument('if_match', type=str, help='ETag of the linkedService entity. Should only be specified for update, ' 'for which it should match existing entity or can be * for unconditional update.') c.argument('properties', type=validate_file_or_dict, help='Properties of linked service. Expected value: ' - 'json-string/@json-file.') + 'json-string/json-file/@json-file.') with self.argument_context('datafactory linked-service update') as c: c.argument('resource_group_name', resource_group_name_type) @@ -295,12 +296,12 @@ def load_arguments(self, _): c.argument('if_match', type=str, help='ETag of the linkedService entity. Should only be specified for update, ' 'for which it should match existing entity or can be * for unconditional update.') c.argument('connect_via', type=validate_file_or_dict, help='The integration runtime reference. Expected value: ' - 'json-string/@json-file.') + 'json-string/json-file/@json-file.') c.argument('description', type=str, help='Linked service description.') c.argument('parameters', type=validate_file_or_dict, help='Parameters for linked service. Expected value: ' - 'json-string/@json-file.') + 'json-string/json-file/@json-file.') c.argument('annotations', type=validate_file_or_dict, help='List of tags that can be used for describing the ' - 'linked service. Expected value: json-string/@json-file.') + 'linked service. Expected value: json-string/json-file/@json-file.') c.ignore('linked_service') with self.argument_context('datafactory linked-service delete') as c: @@ -329,7 +330,7 @@ def load_arguments(self, _): c.argument('if_match', type=str, help='ETag of the dataset entity. Should only be specified for update, for ' 'which it should match existing entity or can be * for unconditional update.') c.argument('properties', type=validate_file_or_dict, help='Dataset properties. Expected value: ' - 'json-string/@json-file.') + 'json-string/json-file/@json-file.') with self.argument_context('datafactory dataset update') as c: c.argument('resource_group_name', resource_group_name_type) @@ -341,16 +342,16 @@ def load_arguments(self, _): c.argument('description', type=str, help='Dataset description.') c.argument('structure', type=validate_file_or_dict, help='Columns that define the structure of the dataset. ' 'Type: array (or Expression with resultType array), itemType: DatasetDataElement. Expected value: ' - 'json-string/@json-file.') + 'json-string/json-file/@json-file.') c.argument('schema', type=validate_file_or_dict, help='Columns that define the physical type schema of the ' 'dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. ' - 'Expected value: json-string/@json-file.') + 'Expected value: json-string/json-file/@json-file.') c.argument('linked_service_name', type=validate_file_or_dict, help='Linked service reference. Expected value: ' - 'json-string/@json-file.') + 'json-string/json-file/@json-file.') c.argument('parameters', type=validate_file_or_dict, help='Parameters for dataset. Expected value: ' - 'json-string/@json-file.') + 'json-string/json-file/@json-file.') c.argument('annotations', type=validate_file_or_dict, help='List of tags that can be used for describing the ' - 'Dataset. Expected value: json-string/@json-file.') + 'Dataset. 
Expected value: json-string/json-file/@json-file.') c.argument('folder', action=AddFolder, nargs='+', help='The folder that this Dataset is in. If not specified, ' 'Dataset will appear at the root level.') c.ignore('dataset') @@ -381,7 +382,7 @@ def load_arguments(self, _): c.argument('if_match', type=str, help='ETag of the pipeline entity. Should only be specified for update, for ' 'which it should match existing entity or can be * for unconditional update.') c.argument('pipeline', type=validate_file_or_dict, help='Pipeline resource definition. Expected value: ' - 'json-string/@json-file.') + 'json-string/json-file/@json-file.') with self.argument_context('datafactory pipeline update') as c: c.argument('resource_group_name', resource_group_name_type) @@ -392,18 +393,19 @@ def load_arguments(self, _): 'which it should match existing entity or can be * for unconditional update.') c.argument('description', type=str, help='The description of the pipeline.') c.argument('activities', type=validate_file_or_dict, help='List of activities in pipeline. Expected value: ' - 'json-string/@json-file.') + 'json-string/json-file/@json-file.') c.argument('parameters', type=validate_file_or_dict, help='List of parameters for pipeline. Expected value: ' - 'json-string/@json-file.') + 'json-string/json-file/@json-file.') c.argument('variables', type=validate_file_or_dict, help='List of variables for pipeline. Expected value: ' - 'json-string/@json-file.') + 'json-string/json-file/@json-file.') c.argument('concurrency', type=int, help='The max number of concurrent runs for the pipeline.') c.argument('annotations', type=validate_file_or_dict, help='List of tags that can be used for describing the ' - 'Pipeline. Expected value: json-string/@json-file.') + 'Pipeline. Expected value: json-string/json-file/@json-file.') c.argument('run_dimensions', type=validate_file_or_dict, help='Dimensions emitted by Pipeline. Expected value: ' - 'json-string/@json-file.') + 'json-string/json-file/@json-file.') c.argument('duration', type=validate_file_or_dict, help='TimeSpan value, after which an Azure Monitoring ' - 'Metric is fired. Expected value: json-string/@json-file.', arg_group='Policy Elapsed Time Metric') + 'Metric is fired. Expected value: json-string/json-file/@json-file.', arg_group='Policy Elapsed ' + 'Time Metric') c.argument('folder_name', type=str, help='The name of the folder that this Pipeline is in.', arg_group='Folder') c.ignore('pipeline') @@ -430,7 +432,7 @@ def load_arguments(self, _): 'rerun will start from failed activities. The property will be used only if startActivityName is ' 'not specified.') c.argument('parameters', type=validate_file_or_dict, help='Parameters of the pipeline run. These parameters ' - 'will be used only if the runId is not specified. Expected value: json-string/@json-file.') + 'will be used only if the runId is not specified. Expected value: json-string/json-file/@json-file.') with self.argument_context('datafactory pipeline-run show') as c: c.argument('resource_group_name', resource_group_name_type) @@ -489,7 +491,7 @@ def load_arguments(self, _): c.argument('if_match', type=str, help='ETag of the trigger entity. Should only be specified for update, for ' 'which it should match existing entity or can be * for unconditional update.') c.argument('properties', type=validate_file_or_dict, help='Properties of the trigger. 
Expected value: ' - 'json-string/@json-file.') + 'json-string/json-file/@json-file.') with self.argument_context('datafactory trigger update') as c: c.argument('resource_group_name', resource_group_name_type) @@ -500,7 +502,7 @@ def load_arguments(self, _): 'which it should match existing entity or can be * for unconditional update.') c.argument('description', type=str, help='Trigger description.') c.argument('annotations', type=validate_file_or_dict, help='List of tags that can be used for describing the ' - 'trigger. Expected value: json-string/@json-file.') + 'trigger. Expected value: json-string/json-file/@json-file.') c.ignore('trigger') with self.argument_context('datafactory trigger delete') as c: @@ -578,3 +580,87 @@ def load_arguments(self, _): c.argument('factory_name', type=str, help='The factory name.', id_part='name') c.argument('trigger_name', type=str, help='The trigger name.', id_part='child_name_1') c.argument('run_id', type=str, help='The pipeline run identifier.', id_part='child_name_2') + + with self.argument_context('datafactory managed-virtual-network list') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('factory_name', type=str, help='The factory name.') + + with self.argument_context('datafactory managed-virtual-network show') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('factory_name', type=str, help='The factory name.', id_part='name') + c.argument('managed_virtual_network_name', options_list=['--name', '-n', '--managed-virtual-network-name'], + type=str, help='Managed virtual network name', id_part='child_name_1') + c.argument('if_none_match', type=str, help='ETag of the managed Virtual Network entity. Should only be ' + 'specified for get. If the ETag matches the existing entity tag, or if * was provided, then no ' + 'content will be returned.') + + with self.argument_context('datafactory managed-virtual-network create') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('factory_name', type=str, help='The factory name.') + c.argument('managed_virtual_network_name', options_list=['--name', '-n', '--managed-virtual-network-name'], + type=str, help='Managed virtual network name') + c.argument('if_match', type=str, help='ETag of the managed Virtual Network entity. Should only be specified ' + 'for update, for which it should match existing entity or can be * for unconditional update.') + + with self.argument_context('datafactory managed-virtual-network update') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('factory_name', type=str, help='The factory name.', id_part='name') + c.argument('managed_virtual_network_name', options_list=['--name', '-n', '--managed-virtual-network-name'], + type=str, help='Managed virtual network name', id_part='child_name_1') + c.argument('if_match', type=str, help='ETag of the managed Virtual Network entity. 
Should only be specified ' + 'for update, for which it should match existing entity or can be * for unconditional update.') + c.ignore('managed_virtual_network') + + with self.argument_context('datafactory managed-private-endpoint list') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('factory_name', type=str, help='The factory name.') + c.argument('managed_virtual_network_name', options_list=['--managed-virtual-network-name', '--mvnet-name'], + type=str, help='Managed virtual network name') + + with self.argument_context('datafactory managed-private-endpoint show') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('factory_name', type=str, help='The factory name.', id_part='name') + c.argument('managed_virtual_network_name', options_list=['--managed-virtual-network-name', '--mvnet-name'], + type=str, help='Managed virtual network name', id_part='child_name_1') + c.argument('managed_private_endpoint_name', options_list=['--name', '-n', '--managed-private-endpoint-name'], + type=str, help='Managed private endpoint name', id_part='child_name_2') + c.argument('if_none_match', type=str, help='ETag of the managed private endpoint entity. Should only be ' + 'specified for get. If the ETag matches the existing entity tag, or if * was provided, then no ' + 'content will be returned.') + + with self.argument_context('datafactory managed-private-endpoint create') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('factory_name', type=str, help='The factory name.') + c.argument('managed_virtual_network_name', options_list=['--managed-virtual-network-name', '--mvnet-name'], + type=str, help='Managed virtual network name') + c.argument('managed_private_endpoint_name', options_list=['--name', '-n', '--managed-private-endpoint-name'], + type=str, help='Managed private endpoint name') + c.argument('if_match', type=str, help='ETag of the managed private endpoint entity. Should only be specified ' + 'for update, for which it should match existing entity or can be * for unconditional update.') + c.argument('fqdns', nargs='+', help='Fully qualified domain names') + c.argument('group_id', type=str, help='The groupId to which the managed private endpoint is created') + c.argument('private_link_resource_id', options_list=['--private-link-resource-id', '--private-link'], type=str, + help='The ARM resource ID of the resource to which the managed private endpoint is created') + + with self.argument_context('datafactory managed-private-endpoint update') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('factory_name', type=str, help='The factory name.', id_part='name') + c.argument('managed_virtual_network_name', options_list=['--managed-virtual-network-name', '--mvnet-name'], + type=str, help='Managed virtual network name', id_part='child_name_1') + c.argument('managed_private_endpoint_name', options_list=['--name', '-n', '--managed-private-endpoint-name'], + type=str, help='Managed private endpoint name', id_part='child_name_2') + c.argument('if_match', type=str, help='ETag of the managed private endpoint entity. 
Should only be specified ' + 'for update, for which it should match existing entity or can be * for unconditional update.') + c.argument('fqdns', nargs='+', help='Fully qualified domain names') + c.argument('group_id', type=str, help='The groupId to which the managed private endpoint is created') + c.argument('private_link_resource_id', options_list=['--private-link-resource-id', '--private-link'], type=str, + help='The ARM resource ID of the resource to which the managed private endpoint is created') + c.ignore('managed_private_endpoint') + + with self.argument_context('datafactory managed-private-endpoint delete') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('factory_name', type=str, help='The factory name.', id_part='name') + c.argument('managed_virtual_network_name', options_list=['--managed-virtual-network-name', '--mvnet-name'], + type=str, help='Managed virtual network name', id_part='child_name_1') + c.argument('managed_private_endpoint_name', options_list=['--name', '-n', '--managed-private-endpoint-name'], + type=str, help='Managed private endpoint name', id_part='child_name_2') diff --git a/src/datafactory/azext_datafactory/generated/action.py b/src/datafactory/azext_datafactory/generated/action.py index f645d72981a..8737ce3fbb2 100644 --- a/src/datafactory/azext_datafactory/generated/action.py +++ b/src/datafactory/azext_datafactory/generated/action.py @@ -7,8 +7,13 @@ # Changes may cause incorrect behavior and will be lost if the code is # regenerated. # -------------------------------------------------------------------------- + + # pylint: disable=protected-access +# pylint: disable=no-self-use + + import argparse from collections import defaultdict from knack.util import CLIError @@ -19,7 +24,7 @@ def __call__(self, parser, namespace, values, option_string=None): action = self.get_action(values, option_string) namespace.factory_vsts_configuration = action - def get_action(self, values, option_string): # pylint: disable=no-self-use + def get_action(self, values, option_string): try: properties = defaultdict(list) for (k, v) in (x.split('=', 1) for x in values): @@ -31,25 +36,37 @@ def get_action(self, values, option_string): # pylint: disable=no-self-use for k in properties: kl = k.lower() v = properties[k] + if kl == 'project-name': d['project_name'] = v[0] + elif kl == 'tenant-id': d['tenant_id'] = v[0] + elif kl == 'account-name': d['account_name'] = v[0] + elif kl == 'repository-name': d['repository_name'] = v[0] + elif kl == 'collaboration-branch': d['collaboration_branch'] = v[0] + elif kl == 'root-folder': d['root_folder'] = v[0] + elif kl == 'last-commit-id': d['last_commit_id'] = v[0] + else: - raise CLIError('Unsupported Key {} is provided for parameter factory_vsts_configuration. All possible ' - 'keys are: project-name, tenant-id, account-name, repository-name, ' - 'collaboration-branch, root-folder, last-commit-id'.format(k)) + raise CLIError( + 'Unsupported Key {} is provided for parameter factory-vsts-configuration. 
All possible keys are:' + ' project-name, tenant-id, account-name, repository-name, collaboration-branch, root-folder,' + ' last-commit-id'.format(k) + ) + d['type'] = 'FactoryVSTSConfiguration' + return d @@ -58,7 +75,7 @@ def __call__(self, parser, namespace, values, option_string=None): action = self.get_action(values, option_string) namespace.factory_git_hub_configuration = action - def get_action(self, values, option_string): # pylint: disable=no-self-use + def get_action(self, values, option_string): try: properties = defaultdict(list) for (k, v) in (x.split('=', 1) for x in values): @@ -70,23 +87,34 @@ def get_action(self, values, option_string): # pylint: disable=no-self-use for k in properties: kl = k.lower() v = properties[k] + if kl == 'host-name': d['host_name'] = v[0] + elif kl == 'account-name': d['account_name'] = v[0] + elif kl == 'repository-name': d['repository_name'] = v[0] + elif kl == 'collaboration-branch': d['collaboration_branch'] = v[0] + elif kl == 'root-folder': d['root_folder'] = v[0] + elif kl == 'last-commit-id': d['last_commit_id'] = v[0] + else: - raise CLIError('Unsupported Key {} is provided for parameter factory_git_hub_configuration. All ' - 'possible keys are: host-name, account-name, repository-name, collaboration-branch, ' - 'root-folder, last-commit-id'.format(k)) + raise CLIError( + 'Unsupported Key {} is provided for parameter factory-git-hub-configuration. All possible keys are:' + ' host-name, account-name, repository-name, collaboration-branch, root-folder, last-commit-id' + .format(k) + ) + d['type'] = 'FactoryGitHubConfiguration' + return d @@ -95,7 +123,7 @@ def __call__(self, parser, namespace, values, option_string=None): action = self.get_action(values, option_string) namespace.folder = action - def get_action(self, values, option_string): # pylint: disable=no-self-use + def get_action(self, values, option_string): try: properties = defaultdict(list) for (k, v) in (x.split('=', 1) for x in values): @@ -107,11 +135,15 @@ def get_action(self, values, option_string): # pylint: disable=no-self-use for k in properties: kl = k.lower() v = properties[k] + if kl == 'name': d['name'] = v[0] + else: - raise CLIError('Unsupported Key {} is provided for parameter folder. All possible keys are: name'. - format(k)) + raise CLIError( + 'Unsupported Key {} is provided for parameter folder. All possible keys are: name'.format(k) + ) + return d @@ -120,7 +152,7 @@ def __call__(self, parser, namespace, values, option_string=None): action = self.get_action(values, option_string) super(AddFilters, self).__call__(parser, namespace, action, option_string) - def get_action(self, values, option_string): # pylint: disable=no-self-use + def get_action(self, values, option_string): try: properties = defaultdict(list) for (k, v) in (x.split('=', 1) for x in values): @@ -132,15 +164,22 @@ def get_action(self, values, option_string): # pylint: disable=no-self-use for k in properties: kl = k.lower() v = properties[k] + if kl == 'operand': d['operand'] = v[0] + elif kl == 'operator': d['operator'] = v[0] + elif kl == 'values': d['values'] = v + else: - raise CLIError('Unsupported Key {} is provided for parameter filters. All possible keys are: operand, ' - 'operator, values'.format(k)) + raise CLIError( + 'Unsupported Key {} is provided for parameter filters. 
All possible keys are: operand, operator,' + ' values'.format(k) + ) + return d @@ -149,7 +188,7 @@ def __call__(self, parser, namespace, values, option_string=None): action = self.get_action(values, option_string) super(AddOrderBy, self).__call__(parser, namespace, action, option_string) - def get_action(self, values, option_string): # pylint: disable=no-self-use + def get_action(self, values, option_string): try: properties = defaultdict(list) for (k, v) in (x.split('=', 1) for x in values): @@ -161,11 +200,17 @@ def get_action(self, values, option_string): # pylint: disable=no-self-use for k in properties: kl = k.lower() v = properties[k] + if kl == 'order-by': d['order_by'] = v[0] + elif kl == 'order': d['order'] = v[0] + else: - raise CLIError('Unsupported Key {} is provided for parameter order_by. All possible keys are: ' - 'order-by, order'.format(k)) + raise CLIError( + 'Unsupported Key {} is provided for parameter order-by. All possible keys are: order-by, order' + .format(k) + ) + return d diff --git a/src/datafactory/azext_datafactory/generated/commands.py b/src/datafactory/azext_datafactory/generated/commands.py index 83b7f9db34e..027d5c6638a 100644 --- a/src/datafactory/azext_datafactory/generated/commands.py +++ b/src/datafactory/azext_datafactory/generated/commands.py @@ -9,17 +9,112 @@ # -------------------------------------------------------------------------- # pylint: disable=too-many-statements # pylint: disable=too-many-locals +# pylint: disable=bad-continuation +# pylint: disable=line-too-long from azure.cli.core.commands import CliCommandType +from azext_datafactory.generated._client_factory import ( + cf_factory, + cf_integration_runtime, + cf_integration_runtime_node, + cf_linked_service, + cf_dataset, + cf_pipeline, + cf_pipeline_run, + cf_activity_run, + cf_trigger, + cf_trigger_run, + cf_managed_virtual_network, + cf_managed_private_endpoint, +) + + +datafactory_factory = CliCommandType( + operations_tmpl=( + 'azext_datafactory.vendored_sdks.datafactory.operations._factories_operations#FactoriesOperations.{}' + ), + client_factory=cf_factory, +) + + +datafactory_activity_run = CliCommandType( + operations_tmpl=( + 'azext_datafactory.vendored_sdks.datafactory.operations._activity_runs_operations#ActivityRunsOperations.{}' + ), + client_factory=cf_activity_run, +) + + +datafactory_dataset = CliCommandType( + operations_tmpl='azext_datafactory.vendored_sdks.datafactory.operations._datasets_operations#DatasetsOperations.{}', + client_factory=cf_dataset, +) + + +datafactory_integration_runtime = CliCommandType( + operations_tmpl='azext_datafactory.vendored_sdks.datafactory.operations._integration_runtimes_operations#IntegrationRuntimesOperations.{}', + client_factory=cf_integration_runtime, +) + + +datafactory_integration_runtime_node = CliCommandType( + operations_tmpl='azext_datafactory.vendored_sdks.datafactory.operations._integration_runtime_nodes_operations#IntegrationRuntimeNodesOperations.{}', + client_factory=cf_integration_runtime_node, +) + + +datafactory_linked_service = CliCommandType( + operations_tmpl=( + 'azext_datafactory.vendored_sdks.datafactory.operations._linked_services_operations#LinkedServicesOperations.{}' + ), + client_factory=cf_linked_service, +) + + +datafactory_managed_private_endpoint = CliCommandType( + operations_tmpl='azext_datafactory.vendored_sdks.datafactory.operations._managed_private_endpoints_operations#ManagedPrivateEndpointsOperations.{}', + client_factory=cf_managed_private_endpoint, +) + + 
+datafactory_managed_virtual_network = CliCommandType( + operations_tmpl='azext_datafactory.vendored_sdks.datafactory.operations._managed_virtual_networks_operations#ManagedVirtualNetworksOperations.{}', + client_factory=cf_managed_virtual_network, +) + + +datafactory_pipeline = CliCommandType( + operations_tmpl=( + 'azext_datafactory.vendored_sdks.datafactory.operations._pipelines_operations#PipelinesOperations.{}' + ), + client_factory=cf_pipeline, +) + + +datafactory_pipeline_run = CliCommandType( + operations_tmpl=( + 'azext_datafactory.vendored_sdks.datafactory.operations._pipeline_runs_operations#PipelineRunsOperations.{}' + ), + client_factory=cf_pipeline_run, +) + + +datafactory_trigger = CliCommandType( + operations_tmpl='azext_datafactory.vendored_sdks.datafactory.operations._triggers_operations#TriggersOperations.{}', + client_factory=cf_trigger, +) + + +datafactory_trigger_run = CliCommandType( + operations_tmpl=( + 'azext_datafactory.vendored_sdks.datafactory.operations._trigger_runs_operations#TriggerRunsOperations.{}' + ), + client_factory=cf_trigger_run, +) def load_command_table(self, _): - from azext_datafactory.generated._client_factory import cf_factory - datafactory_factory = CliCommandType( - operations_tmpl='azext_datafactory.vendored_sdks.datafactory.operations._factories_operations#FactoriesOperatio' - 'ns.{}', - client_factory=cf_factory) with self.command_group('datafactory', datafactory_factory, client_factory=cf_factory) as g: g.custom_command('list', 'datafactory_list') g.custom_show_command('show', 'datafactory_show') @@ -30,17 +125,24 @@ def load_command_table(self, _): g.custom_command('get-data-plane-access', 'datafactory_get_data_plane_access') g.custom_command('get-git-hub-access-token', 'datafactory_get_git_hub_access_token') - from azext_datafactory.generated._client_factory import cf_integration_runtime - datafactory_integration_runtime = CliCommandType( - operations_tmpl='azext_datafactory.vendored_sdks.datafactory.operations._integration_runtimes_operations#Integr' - 'ationRuntimesOperations.{}', - client_factory=cf_integration_runtime) - with self.command_group('datafactory integration-runtime', datafactory_integration_runtime, - client_factory=cf_integration_runtime) as g: + with self.command_group('datafactory activity-run', datafactory_activity_run, client_factory=cf_activity_run) as g: + g.custom_command('query-by-pipeline-run', 'datafactory_activity_run_query_by_pipeline_run') + + with self.command_group('datafactory dataset', datafactory_dataset, client_factory=cf_dataset) as g: + g.custom_command('list', 'datafactory_dataset_list') + g.custom_show_command('show', 'datafactory_dataset_show') + g.custom_command('create', 'datafactory_dataset_create') + g.generic_update_command('update', custom_func_name='datafactory_dataset_update', setter_arg_name='dataset') + g.custom_command('delete', 'datafactory_dataset_delete', confirmation=True) + + with self.command_group( + 'datafactory integration-runtime', datafactory_integration_runtime, client_factory=cf_integration_runtime + ) as g: g.custom_command('list', 'datafactory_integration_runtime_list') g.custom_show_command('show', 'datafactory_integration_runtime_show') - g.custom_command('linked-integration-runtime create', 'datafactory_integration_runtime_linked_integration_runti' - 'me_create') + g.custom_command( + 'linked-integration-runtime create', 'datafactory_integration_runtime_linked_integration_runtime_create' + ) g.custom_command('managed create', 
'datafactory_integration_runtime_managed_create') g.custom_command('self-hosted create', 'datafactory_integration_runtime_self_hosted_create') g.custom_command('update', 'datafactory_integration_runtime_update') @@ -57,102 +159,85 @@ def load_command_table(self, _): g.custom_command('upgrade', 'datafactory_integration_runtime_upgrade') g.custom_wait_command('wait', 'datafactory_integration_runtime_show') - from azext_datafactory.generated._client_factory import cf_integration_runtime_node - datafactory_integration_runtime_node = CliCommandType( - operations_tmpl='azext_datafactory.vendored_sdks.datafactory.operations._integration_runtime_nodes_operations#I' - 'ntegrationRuntimeNodesOperations.{}', - client_factory=cf_integration_runtime_node) - with self.command_group('datafactory integration-runtime-node', datafactory_integration_runtime_node, - client_factory=cf_integration_runtime_node) as g: + with self.command_group( + 'datafactory integration-runtime-node', + datafactory_integration_runtime_node, + client_factory=cf_integration_runtime_node, + ) as g: g.custom_show_command('show', 'datafactory_integration_runtime_node_show') g.custom_command('update', 'datafactory_integration_runtime_node_update') g.custom_command('delete', 'datafactory_integration_runtime_node_delete', confirmation=True) g.custom_command('get-ip-address', 'datafactory_integration_runtime_node_get_ip_address') - from azext_datafactory.generated._client_factory import cf_linked_service - datafactory_linked_service = CliCommandType( - operations_tmpl='azext_datafactory.vendored_sdks.datafactory.operations._linked_services_operations#LinkedServi' - 'cesOperations.{}', - client_factory=cf_linked_service) - with self.command_group('datafactory linked-service', datafactory_linked_service, - client_factory=cf_linked_service) as g: + with self.command_group( + 'datafactory linked-service', datafactory_linked_service, client_factory=cf_linked_service + ) as g: g.custom_command('list', 'datafactory_linked_service_list') g.custom_show_command('show', 'datafactory_linked_service_show') g.custom_command('create', 'datafactory_linked_service_create') - g.generic_update_command('update', setter_arg_name='linked_service', - custom_func_name='datafactory_linked_service_update') + g.generic_update_command( + 'update', custom_func_name='datafactory_linked_service_update', setter_arg_name='linked_service' + ) g.custom_command('delete', 'datafactory_linked_service_delete', confirmation=True) - from azext_datafactory.generated._client_factory import cf_dataset - datafactory_dataset = CliCommandType( - operations_tmpl='azext_datafactory.vendored_sdks.datafactory.operations._datasets_operations#DatasetsOperations' - '.{}', - client_factory=cf_dataset) - with self.command_group('datafactory dataset', datafactory_dataset, client_factory=cf_dataset) as g: - g.custom_command('list', 'datafactory_dataset_list') - g.custom_show_command('show', 'datafactory_dataset_show') - g.custom_command('create', 'datafactory_dataset_create') - g.generic_update_command('update', setter_arg_name='dataset', custom_func_name='datafactory_dataset_update') - g.custom_command('delete', 'datafactory_dataset_delete', confirmation=True) + with self.command_group( + 'datafactory managed-private-endpoint', + datafactory_managed_private_endpoint, + client_factory=cf_managed_private_endpoint, + is_preview=True, + ) as g: + g.custom_command('list', 'datafactory_managed_private_endpoint_list') + g.custom_show_command('show', 'datafactory_managed_private_endpoint_show') + 
g.custom_command('create', 'datafactory_managed_private_endpoint_create') + g.generic_update_command( + 'update', + custom_func_name='datafactory_managed_private_endpoint_update', + setter_arg_name='managed_private_endpoint', + ) + g.custom_command('delete', 'datafactory_managed_private_endpoint_delete', confirmation=True) + + with self.command_group( + 'datafactory managed-virtual-network', + datafactory_managed_virtual_network, + client_factory=cf_managed_virtual_network, + is_preview=True, + ) as g: + g.custom_command('list', 'datafactory_managed_virtual_network_list') + g.custom_show_command('show', 'datafactory_managed_virtual_network_show') + g.custom_command('create', 'datafactory_managed_virtual_network_create') + g.generic_update_command( + 'update', + custom_func_name='datafactory_managed_virtual_network_update', + setter_arg_name='managed_virtual_network', + ) - from azext_datafactory.generated._client_factory import cf_pipeline - datafactory_pipeline = CliCommandType( - operations_tmpl='azext_datafactory.vendored_sdks.datafactory.operations._pipelines_operations#PipelinesOperatio' - 'ns.{}', - client_factory=cf_pipeline) with self.command_group('datafactory pipeline', datafactory_pipeline, client_factory=cf_pipeline) as g: g.custom_command('list', 'datafactory_pipeline_list') g.custom_show_command('show', 'datafactory_pipeline_show') g.custom_command('create', 'datafactory_pipeline_create') - g.generic_update_command('update', setter_arg_name='pipeline', custom_func_name='datafactory_pipeline_update') + g.generic_update_command('update', custom_func_name='datafactory_pipeline_update', setter_arg_name='pipeline') g.custom_command('delete', 'datafactory_pipeline_delete', confirmation=True) g.custom_command('create-run', 'datafactory_pipeline_create_run') - from azext_datafactory.generated._client_factory import cf_pipeline_run - datafactory_pipeline_run = CliCommandType( - operations_tmpl='azext_datafactory.vendored_sdks.datafactory.operations._pipeline_runs_operations#PipelineRunsO' - 'perations.{}', - client_factory=cf_pipeline_run) - with self.command_group('datafactory pipeline-run', datafactory_pipeline_run, - client_factory=cf_pipeline_run) as g: + with self.command_group('datafactory pipeline-run', datafactory_pipeline_run, client_factory=cf_pipeline_run) as g: g.custom_show_command('show', 'datafactory_pipeline_run_show') g.custom_command('cancel', 'datafactory_pipeline_run_cancel') g.custom_command('query-by-factory', 'datafactory_pipeline_run_query_by_factory') - from azext_datafactory.generated._client_factory import cf_activity_run - datafactory_activity_run = CliCommandType( - operations_tmpl='azext_datafactory.vendored_sdks.datafactory.operations._activity_runs_operations#ActivityRunsO' - 'perations.{}', - client_factory=cf_activity_run) - with self.command_group('datafactory activity-run', datafactory_activity_run, - client_factory=cf_activity_run) as g: - g.custom_command('query-by-pipeline-run', 'datafactory_activity_run_query_by_pipeline_run') - - from azext_datafactory.generated._client_factory import cf_trigger - datafactory_trigger = CliCommandType( - operations_tmpl='azext_datafactory.vendored_sdks.datafactory.operations._triggers_operations#TriggersOperations' - '.{}', - client_factory=cf_trigger) with self.command_group('datafactory trigger', datafactory_trigger, client_factory=cf_trigger) as g: g.custom_command('list', 'datafactory_trigger_list') g.custom_show_command('show', 'datafactory_trigger_show') g.custom_command('create', 
'datafactory_trigger_create') - g.generic_update_command('update', setter_arg_name='trigger', custom_func_name='datafactory_trigger_update') + g.generic_update_command('update', custom_func_name='datafactory_trigger_update', setter_arg_name='trigger') g.custom_command('delete', 'datafactory_trigger_delete', confirmation=True) g.custom_command('get-event-subscription-status', 'datafactory_trigger_get_event_subscription_status') g.custom_command('query-by-factory', 'datafactory_trigger_query_by_factory') g.custom_command('start', 'datafactory_trigger_start', supports_no_wait=True) g.custom_command('stop', 'datafactory_trigger_stop', supports_no_wait=True) g.custom_command('subscribe-to-event', 'datafactory_trigger_subscribe_to_event', supports_no_wait=True) - g.custom_command('unsubscribe-from-event', 'datafactory_trigger_unsubscribe_from_event', - supports_no_wait=True) + g.custom_command('unsubscribe-from-event', 'datafactory_trigger_unsubscribe_from_event', supports_no_wait=True) g.custom_wait_command('wait', 'datafactory_trigger_show') - from azext_datafactory.generated._client_factory import cf_trigger_run - datafactory_trigger_run = CliCommandType( - operations_tmpl='azext_datafactory.vendored_sdks.datafactory.operations._trigger_runs_operations#TriggerRunsOpe' - 'rations.{}', - client_factory=cf_trigger_run) with self.command_group('datafactory trigger-run', datafactory_trigger_run, client_factory=cf_trigger_run) as g: g.custom_command('cancel', 'datafactory_trigger_run_cancel') g.custom_command('query-by-factory', 'datafactory_trigger_run_query_by_factory') diff --git a/src/datafactory/azext_datafactory/generated/custom.py b/src/datafactory/azext_datafactory/generated/custom.py index c269c1999ff..d9b21280a67 100644 --- a/src/datafactory/azext_datafactory/generated/custom.py +++ b/src/datafactory/azext_datafactory/generated/custom.py @@ -50,12 +50,20 @@ def datafactory_create(client, 'repo_configuration!') repo_configuration = all_repo_configuration[0] if len(all_repo_configuration) == 1 else None factory = {} - factory['location'] = location - factory['tags'] = tags - factory['repo_configuration'] = repo_configuration - factory['global_parameters'] = global_parameters + if location is not None: + factory['location'] = location + if tags is not None: + factory['tags'] = tags + if repo_configuration is not None: + factory['repo_configuration'] = repo_configuration + if global_parameters is not None: + factory['global_parameters'] = global_parameters factory['encryption'] = {} + if len(factory['encryption']) == 0: + del factory['encryption'] factory['identity'] = {} + if len(factory['identity']) == 0: + del factory['identity'] return client.create_or_update(resource_group_name=resource_group_name, factory_name=factory_name, if_match=if_match, @@ -67,8 +75,11 @@ def datafactory_update(client, factory_name, tags=None): factory_update_parameters = {} - factory_update_parameters['tags'] = tags + if tags is not None: + factory_update_parameters['tags'] = tags factory_update_parameters['identity'] = {} + if len(factory_update_parameters['identity']) == 0: + del factory_update_parameters['identity'] return client.update(resource_group_name=resource_group_name, factory_name=factory_name, factory_update_parameters=factory_update_parameters) @@ -96,8 +107,10 @@ def datafactory_configure_factory_repo(client, 'repo_configuration!') repo_configuration = all_repo_configuration[0] if len(all_repo_configuration) == 1 else None factory_repo_update = {} - factory_repo_update['factory_resource_id'] = 
factory_resource_id - factory_repo_update['repo_configuration'] = repo_configuration + if factory_resource_id is not None: + factory_repo_update['factory_resource_id'] = factory_resource_id + if repo_configuration is not None: + factory_repo_update['repo_configuration'] = repo_configuration return client.configure_factory_repo(location_id=location, factory_repo_update=factory_repo_update) @@ -111,11 +124,16 @@ def datafactory_get_data_plane_access(client, start_time=None, expire_time=None): policy = {} - policy['permissions'] = permissions - policy['access_resource_path'] = access_resource_path - policy['profile_name'] = profile_name - policy['start_time'] = start_time - policy['expire_time'] = expire_time + if permissions is not None: + policy['permissions'] = permissions + if access_resource_path is not None: + policy['access_resource_path'] = access_resource_path + if profile_name is not None: + policy['profile_name'] = profile_name + if start_time is not None: + policy['start_time'] = start_time + if expire_time is not None: + policy['expire_time'] = expire_time return client.get_data_plane_access(resource_group_name=resource_group_name, factory_name=factory_name, policy=policy) @@ -129,7 +147,8 @@ def datafactory_get_git_hub_access_token(client, git_hub_client_id=None): git_hub_access_token_request = {} git_hub_access_token_request['git_hub_access_code'] = git_hub_access_code - git_hub_access_token_request['git_hub_client_id'] = git_hub_client_id + if git_hub_client_id is not None: + git_hub_access_token_request['git_hub_client_id'] = git_hub_client_id git_hub_access_token_request['git_hub_access_token_base_url'] = git_hub_access_token_base_url return client.get_git_hub_access_token(resource_group_name=resource_group_name, factory_name=factory_name, @@ -163,10 +182,14 @@ def datafactory_integration_runtime_linked_integration_runtime_create(client, data_factory_name=None, location=None): create_linked_integration_runtime_request = {} - create_linked_integration_runtime_request['name'] = name - create_linked_integration_runtime_request['subscription_id'] = subscription_id - create_linked_integration_runtime_request['data_factory_name'] = data_factory_name - create_linked_integration_runtime_request['data_factory_location'] = location + if name is not None: + create_linked_integration_runtime_request['name'] = name + if subscription_id is not None: + create_linked_integration_runtime_request['subscription_id'] = subscription_id + if data_factory_name is not None: + create_linked_integration_runtime_request['data_factory_name'] = data_factory_name + if location is not None: + create_linked_integration_runtime_request['data_factory_location'] = location return client.create_linked_integration_runtime(resource_group_name=resource_group_name, factory_name=factory_name, integration_runtime_name=integration_runtime_name, @@ -184,9 +207,12 @@ def datafactory_integration_runtime_managed_create(client, integration_runtime = {} integration_runtime['properties'] = {} integration_runtime['properties']['type'] = 'Managed' - integration_runtime['properties']['description'] = description - integration_runtime['properties']['compute_properties'] = compute_properties - integration_runtime['properties']['ssis_properties'] = ssis_properties + if description is not None: + integration_runtime['properties']['description'] = description + if compute_properties is not None: + integration_runtime['properties']['compute_properties'] = compute_properties + if ssis_properties is not None: + 
integration_runtime['properties']['ssis_properties'] = ssis_properties return client.create_or_update(resource_group_name=resource_group_name, factory_name=factory_name, integration_runtime_name=integration_runtime_name, @@ -204,8 +230,10 @@ def datafactory_integration_runtime_self_hosted_create(client, integration_runtime = {} integration_runtime['properties'] = {} integration_runtime['properties']['type'] = 'SelfHosted' - integration_runtime['properties']['description'] = description - integration_runtime['properties']['linked_info'] = linked_info + if description is not None: + integration_runtime['properties']['description'] = description + if linked_info is not None: + integration_runtime['properties']['linked_info'] = linked_info return client.create_or_update(resource_group_name=resource_group_name, factory_name=factory_name, integration_runtime_name=integration_runtime_name, @@ -220,8 +248,10 @@ def datafactory_integration_runtime_update(client, auto_update=None, update_delay_offset=None): update_integration_runtime_request = {} - update_integration_runtime_request['auto_update'] = auto_update - update_integration_runtime_request['update_delay_offset'] = update_delay_offset + if auto_update is not None: + update_integration_runtime_request['auto_update'] = auto_update + if update_delay_offset is not None: + update_integration_runtime_request['update_delay_offset'] = update_delay_offset return client.update(resource_group_name=resource_group_name, factory_name=factory_name, integration_runtime_name=integration_runtime_name, @@ -279,7 +309,8 @@ def datafactory_integration_runtime_regenerate_auth_key(client, integration_runtime_name, key_name=None): regenerate_key_parameters = {} - regenerate_key_parameters['key_name'] = key_name + if key_name is not None: + regenerate_key_parameters['key_name'] = key_name return client.regenerate_auth_key(resource_group_name=resource_group_name, factory_name=factory_name, integration_runtime_name=integration_runtime_name, @@ -359,7 +390,8 @@ def datafactory_integration_runtime_node_update(client, node_name, concurrent_jobs_limit=None): update_integration_runtime_node_request = {} - update_integration_runtime_node_request['concurrent_jobs_limit'] = concurrent_jobs_limit + if concurrent_jobs_limit is not None: + update_integration_runtime_node_request['concurrent_jobs_limit'] = concurrent_jobs_limit return client.update(resource_group_name=resource_group_name, factory_name=factory_name, integration_runtime_name=integration_runtime_name, @@ -502,8 +534,7 @@ def datafactory_dataset_update(instance, instance.properties.structure = structure if schema is not None: instance.properties.schema = schema - if linked_service_name is not None: - instance.properties.linked_service_name = linked_service_name + instance.properties.linked_service_name = linked_service_name if parameters is not None: instance.properties.parameters = parameters if annotations is not None: @@ -645,11 +676,14 @@ def datafactory_pipeline_run_query_by_factory(client, filters=None, order_by=None): filter_parameters = {} - filter_parameters['continuation_token'] = continuation_token + if continuation_token is not None: + filter_parameters['continuation_token'] = continuation_token filter_parameters['last_updated_after'] = last_updated_after filter_parameters['last_updated_before'] = last_updated_before - filter_parameters['filters'] = filters - filter_parameters['order_by'] = order_by + if filters is not None: + filter_parameters['filters'] = filters + if order_by is not None: + 
filter_parameters['order_by'] = order_by return client.query_by_factory(resource_group_name=resource_group_name, factory_name=factory_name, filter_parameters=filter_parameters) @@ -665,11 +699,14 @@ def datafactory_activity_run_query_by_pipeline_run(client, filters=None, order_by=None): filter_parameters = {} - filter_parameters['continuation_token'] = continuation_token + if continuation_token is not None: + filter_parameters['continuation_token'] = continuation_token filter_parameters['last_updated_after'] = last_updated_after filter_parameters['last_updated_before'] = last_updated_before - filter_parameters['filters'] = filters - filter_parameters['order_by'] = order_by + if filters is not None: + filter_parameters['filters'] = filters + if order_by is not None: + filter_parameters['order_by'] = order_by return client.query_by_pipeline_run(resource_group_name=resource_group_name, factory_name=factory_name, run_id=run_id, @@ -747,8 +784,10 @@ def datafactory_trigger_query_by_factory(client, continuation_token=None, parent_trigger_name=None): filter_parameters = {} - filter_parameters['continuation_token'] = continuation_token - filter_parameters['parent_trigger_name'] = parent_trigger_name + if continuation_token is not None: + filter_parameters['continuation_token'] = continuation_token + if parent_trigger_name is not None: + filter_parameters['parent_trigger_name'] = parent_trigger_name return client.query_by_factory(resource_group_name=resource_group_name, factory_name=factory_name, filter_parameters=filter_parameters) @@ -822,11 +861,14 @@ def datafactory_trigger_run_query_by_factory(client, filters=None, order_by=None): filter_parameters = {} - filter_parameters['continuation_token'] = continuation_token + if continuation_token is not None: + filter_parameters['continuation_token'] = continuation_token filter_parameters['last_updated_after'] = last_updated_after filter_parameters['last_updated_before'] = last_updated_before - filter_parameters['filters'] = filters - filter_parameters['order_by'] = order_by + if filters is not None: + filter_parameters['filters'] = filters + if order_by is not None: + filter_parameters['order_by'] = order_by return client.query_by_factory(resource_group_name=resource_group_name, factory_name=factory_name, filter_parameters=filter_parameters) @@ -841,3 +883,121 @@ def datafactory_trigger_run_rerun(client, factory_name=factory_name, trigger_name=trigger_name, run_id=run_id) + + +def datafactory_managed_virtual_network_list(client, + resource_group_name, + factory_name): + return client.list_by_factory(resource_group_name=resource_group_name, + factory_name=factory_name) + + +def datafactory_managed_virtual_network_show(client, + resource_group_name, + factory_name, + managed_virtual_network_name, + if_none_match=None): + return client.get(resource_group_name=resource_group_name, + factory_name=factory_name, + managed_virtual_network_name=managed_virtual_network_name, + if_none_match=if_none_match) + + +def datafactory_managed_virtual_network_create(client, + resource_group_name, + factory_name, + managed_virtual_network_name, + if_match=None): + managed_virtual_network = {} + managed_virtual_network['properties'] = {} + return client.create_or_update(resource_group_name=resource_group_name, + factory_name=factory_name, + managed_virtual_network_name=managed_virtual_network_name, + if_match=if_match, + managed_virtual_network=managed_virtual_network) + + +def datafactory_managed_virtual_network_update(instance, + resource_group_name, + factory_name, + 
managed_virtual_network_name, + if_match=None): + return instance + + +def datafactory_managed_private_endpoint_list(client, + resource_group_name, + factory_name, + managed_virtual_network_name): + return client.list_by_factory(resource_group_name=resource_group_name, + factory_name=factory_name, + managed_virtual_network_name=managed_virtual_network_name) + + +def datafactory_managed_private_endpoint_show(client, + resource_group_name, + factory_name, + managed_virtual_network_name, + managed_private_endpoint_name, + if_none_match=None): + return client.get(resource_group_name=resource_group_name, + factory_name=factory_name, + managed_virtual_network_name=managed_virtual_network_name, + managed_private_endpoint_name=managed_private_endpoint_name, + if_none_match=if_none_match) + + +def datafactory_managed_private_endpoint_create(client, + resource_group_name, + factory_name, + managed_virtual_network_name, + managed_private_endpoint_name, + if_match=None, + fqdns=None, + group_id=None, + private_link_resource_id=None): + managed_private_endpoint = {} + managed_private_endpoint['properties'] = {} + if fqdns is not None: + managed_private_endpoint['properties']['fqdns'] = fqdns + if group_id is not None: + managed_private_endpoint['properties']['group_id'] = group_id + if private_link_resource_id is not None: + managed_private_endpoint['properties']['private_link_resource_id'] = private_link_resource_id + if len(managed_private_endpoint['properties']) == 0: + del managed_private_endpoint['properties'] + return client.create_or_update(resource_group_name=resource_group_name, + factory_name=factory_name, + managed_virtual_network_name=managed_virtual_network_name, + managed_private_endpoint_name=managed_private_endpoint_name, + if_match=if_match, + managed_private_endpoint=managed_private_endpoint) + + +def datafactory_managed_private_endpoint_update(instance, + resource_group_name, + factory_name, + managed_virtual_network_name, + managed_private_endpoint_name, + if_match=None, + fqdns=None, + group_id=None, + private_link_resource_id=None): + if fqdns is not None: + instance.properties.fqdns = fqdns + if group_id is not None: + instance.properties.group_id = group_id + if private_link_resource_id is not None: + instance.properties.private_link_resource_id = private_link_resource_id + return instance + + +def datafactory_managed_private_endpoint_delete(client, + resource_group_name, + factory_name, + managed_virtual_network_name, + managed_private_endpoint_name): + return client.delete(resource_group_name=resource_group_name, + factory_name=factory_name, + managed_virtual_network_name=managed_virtual_network_name, + managed_private_endpoint_name=managed_private_endpoint_name) diff --git a/src/datafactory/azext_datafactory/manual/tests/latest/test_datafactory_scenario.py b/src/datafactory/azext_datafactory/manual/tests/latest/test_datafactory_scenario.py index 64fc8cefe48..28519f30473 100644 --- a/src/datafactory/azext_datafactory/manual/tests/latest/test_datafactory_scenario.py +++ b/src/datafactory/azext_datafactory/manual/tests/latest/test_datafactory_scenario.py @@ -9,8 +9,45 @@ # -------------------------------------------------------------------------- +def step_dataset_update(test, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory dataset update ' + '--description "Example description" ' + '--linked-service-name "{{\\"type\\":\\"LinkedServiceReference\\",\\"referenceName\\":\\"{myLinkedService}' + '\\"}}" ' + '--parameters 
"{{\\"MyFileName\\":{{\\"type\\":\\"String\\"}},\\"MyFolderPath\\":{{\\"type\\":\\"String\\"' + '}}}}" ' + '--name "{myDataset}" ' + '--factory-name "{myFactory}" ' + '--resource-group "{rg}"', + checks=checks) + + +def step_linked_service_update(test, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory linked-service update ' + '--factory-name "{myFactory}" ' + '--description "Example description" ' + '--name "{myLinkedService}" ' + '--resource-group "{rg}"', + checks=checks) + + +def step_trigger_update(test, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory trigger update ' + '--factory-name "{myFactory}" ' + '--resource-group "{rg}" ' + '--description "Example description" ' + '--name "{myTrigger}"', + checks=checks) + + # EXAMPLE: IntegrationRuntimes_Create -def step_integration_runtime_create(test, rg): +def step_integration_runtime_create(test): test.cmd('az datafactory integration-runtime self-hosted create ' '--factory-name "{myFactory}" ' '--description "A selfhosted integration runtime" ' @@ -22,7 +59,7 @@ def step_integration_runtime_create(test, rg): ]) -def step_trigger_run_rerun(test, rg): +def step_trigger_run_rerun(test): test.cmd('az datafactory trigger-run rerun ' '--factory-name "{myFactory}" ' '--resource-group "{rg}" ' @@ -31,7 +68,7 @@ def step_trigger_run_rerun(test, rg): checks=[]) -def step_pipeline_create_run(test, rg): +def step_pipeline_create_run(test): output = test.cmd('az datafactory pipeline create-run ' '--factory-name "{myFactory}" ' '--parameters "{{\\"OutputBlobNameList\\":[\\"exampleoutput.csv\\"]}}" ' @@ -41,7 +78,7 @@ def step_pipeline_create_run(test, rg): return output -def step_pipeline_run_cancel(test, rg): +def step_pipeline_run_cancel(test): test.cmd('az datafactory pipeline-run cancel ' '--factory-name "{myFactory}" ' '--resource-group "{rg}" ' @@ -49,7 +86,7 @@ def step_pipeline_run_cancel(test, rg): checks=[]) -def step_pipeline_run_show(test, rg): +def step_pipeline_run_show(test): test.cmd('az datafactory pipeline-run show ' '--factory-name "{myFactory}" ' '--resource-group "{rg}" ' @@ -57,7 +94,7 @@ def step_pipeline_run_show(test, rg): checks=[]) -def step_pipeline_update(test, rg): +def step_pipeline_update(test): test.cmd('az datafactory pipeline update ' '--factory-name "{myFactory}" ' '--description "Test Update description" ' @@ -66,7 +103,7 @@ def step_pipeline_update(test, rg): checks=[]) -def step_trigger_run_query_by_factory(test, rg): +def step_trigger_run_query_by_factory(test): output = test.cmd('az datafactory trigger-run query-by-factory ' '--factory-name "{myFactory}" ' '--last-updated-after "{myStartTime}" ' @@ -76,7 +113,7 @@ def step_trigger_run_query_by_factory(test, rg): return output -def step_integration_runtime_managed_create(test, rg): +def step_integration_runtime_managed_create(test): test.cmd('az datafactory integration-runtime managed create ' '--factory-name "{myFactory}" ' '--name "{myIntegrationRuntime}" ' @@ -93,7 +130,7 @@ def step_integration_runtime_managed_create(test, rg): ]) -def step_pipeline_wait_create(test, rg): +def step_pipeline_wait_create(test): test.cmd('az datafactory pipeline create ' '--factory-name "{myFactory}" ' '--pipeline "{{\\"activities\\":[{{\\"name\\":\\"Wait1\\",' @@ -108,7 +145,7 @@ def step_pipeline_wait_create(test, rg): ]) -def step_trigger_tumble_create(test, rg): +def step_trigger_tumble_create(test): test.cmd('az datafactory trigger create ' '--resource-group "{rg}" ' '--properties 
"{{\\"description\\":\\"trumblingwindowtrigger' @@ -130,40 +167,40 @@ def step_trigger_tumble_create(test, rg): ]) -def call_managed_integrationruntime_scenario(test, rg): +def call_managed_integrationruntime_scenario(test): from ....tests.latest import test_datafactory_scenario as g - g.setup_scenario(test, rg) - g.step_create(test, rg) - step_integration_runtime_managed_create(test, rg) - g.step_integration_runtime_show(test, rg) + g.setup_main(test) + g.step_create(test) + step_integration_runtime_managed_create(test) + g.step_integration_runtime_show(test) test.kwargs.update({'myIntegrationRuntime2': test.kwargs.get('myIntegrationRuntime')}) - g.step_integration_runtime_start(test, rg) - g.step_integration_runtime_stop(test, rg) - g.step_integration_runtime_delete(test, rg) - g.step_delete(test, rg) - g.cleanup_scenario(test, rg) + g.step_integration_runtime_start(test) + g.step_integration_runtime_stop(test) + g.step_integration_runtime_delete(test) + g.step_delete(test) + g.cleanup_main(test) -def call_triggerrun_scenario(test, rg): +def call_triggerrun_scenario(test): from ....tests.latest import test_datafactory_scenario as g import time - g.setup_scenario(test, rg) - g.step_create(test, rg) - step_pipeline_wait_create(test, rg) - createrun_res = step_pipeline_create_run(test, rg) + g.setup_main(test) + g.step_create(test) + step_pipeline_wait_create(test) + createrun_res = step_pipeline_create_run(test) time.sleep(5) test.kwargs.update({'myRunId': createrun_res.get('runId')}) - step_pipeline_run_show(test, rg) - g.step_activity_run_query_by_pipeline_run(test, rg) - createrun_res = step_pipeline_create_run(test, rg) + step_pipeline_run_show(test) + g.step_activity_run_query_by_pipeline_run(test) + createrun_res = step_pipeline_create_run(test) test.kwargs.update({'myRunId': createrun_res.get('runId')}) - step_pipeline_run_cancel(test, rg) - step_trigger_tumble_create(test, rg) - g.step_trigger_start(test, rg) - g.step_trigger_show(test, rg) + step_pipeline_run_cancel(test) + step_trigger_tumble_create(test) + g.step_trigger_start(test) + g.step_trigger_show(test) maxRound = 2 while True: - triggerrun_res = step_trigger_run_query_by_factory(test, rg) + triggerrun_res = step_trigger_run_query_by_factory(test) if len(triggerrun_res['value']) > 0 and triggerrun_res['value'][0]['status'] == 'Succeeded': test.kwargs.update({'myRunId': triggerrun_res['value'][0]['triggerRunId']}) break @@ -175,77 +212,77 @@ def call_triggerrun_scenario(test, rg): else: break if maxRound > 0: - step_trigger_run_rerun(test, rg) - step_trigger_run_query_by_factory(test, rg) - g.step_trigger_stop(test, rg) - g.step_trigger_delete(test, rg) - g.step_pipeline_delete(test, rg) - g.step_delete(test, rg) - g.cleanup_scenario(test, rg) + step_trigger_run_rerun(test) + step_trigger_run_query_by_factory(test) + g.step_trigger_stop(test) + g.step_trigger_delete(test) + g.step_pipeline_delete(test) + g.step_delete(test) + g.cleanup_main(test) -def call_main_scenario(test, rg): +def call_main_scenario(test): from ....tests.latest import test_datafactory_scenario as g - g.setup_scenario(test, rg) - g.step_create(test, rg) - g.step_update(test, rg) - g.step_linked_service_create(test, rg) - g.step_linked_service_update(test, rg) - g.step_dataset_create(test, rg) - g.step_dataset_update(test, rg) - g.step_pipeline_create(test, rg) - step_pipeline_update(test, rg) - g.step_trigger_create(test, rg) - g.step_trigger_update(test, rg) - g.step_integration_runtime_self_hosted_create(test, rg) - 
g.step_integration_runtime_update(test, rg) - # g.step_integration_runtime_linked(test, rg) - step_pipeline_create_run(test, rg) - g.step_integration_runtime_show(test, rg) - g.step_linked_service_show(test, rg) - g.step_pipeline_show(test, rg) - g.step_dataset_show(test, rg) - g.step_trigger_show(test, rg) - g.step_integration_runtime_list(test, rg) - g.step_linked_service_list(test, rg) - g.step_pipeline_list(test, rg) - g.step_trigger_list(test, rg) - g.step_dataset_list(test, rg) - g.step_show(test, rg) - g.step_list2(test, rg) - g.step_list(test, rg) - g.step_integration_runtime_regenerate_auth_key(test, rg) - # g.step_integration_runtime_get_connection_info(test, rg) - g.step_integration_runtime_sync_credentials(test, rg) - g.step_integration_runtime_get_monitoring_data(test, rg) - g.step_integration_runtime_list_auth_key(test, rg) - g.step_integration_runtime_remove_link(test, rg) - g.step_integration_runtime_get_status(test, rg) - # g.step_integration_runtime_start(test, rg) - # g.step_integration_runtime_stop(test, rg) - # g.step_integrationruntimes_createlinkedintegrationruntime(test, rg) - g.step_trigger_get_event_subscription_status(test, rg) - # g.step_activity_run_query_by_pipeline_run(test, rg) - g.step_trigger_unsubscribe_from_event(test, rg) - g.step_trigger_subscribe_to_event(test, rg) - g.step_trigger_start(test, rg) - g.step_trigger_stop(test, rg) - # g.step_get_git_hub_access_token(test, rg) - g.step_get_data_plane_access(test, rg) - # g.step_pipeline_run_query_by_factory(test, rg) - # g.step_pipeline_run_cancel(test, rg) - step_trigger_run_query_by_factory(test, rg) - g.step_configure_factory_repo(test, rg) - g.step_integration_runtime_delete(test, rg) - g.step_trigger_delete(test, rg) - g.step_pipeline_delete(test, rg) - g.step_dataset_delete(test, rg) - g.step_linked_service_delete(test, rg) - g.step_delete(test, rg) - g.cleanup_scenario(test, rg) - - -def call_scenario(test, rg): + g.setup_main(test) + g.step_create(test) + g.step_update(test) + g.step_linked_service_create(test) + step_linked_service_update(test) + g.step_dataset_create(test) + step_dataset_update(test) + g.step_pipeline_create(test) + step_pipeline_update(test) + g.step_trigger_create(test) + step_trigger_update(test) + g.step_integration_runtime_self_hosted_create(test) + g.step_integration_runtime_update(test) + # g.step_integration_runtime_linked(test) + step_pipeline_create_run(test) + g.step_integration_runtime_show(test) + g.step_linked_service_show(test) + g.step_pipeline_show(test) + g.step_dataset_show(test) + g.step_trigger_show(test) + g.step_integration_runtime_list(test) + g.step_linked_service_list(test) + g.step_pipeline_list(test) + g.step_trigger_list(test) + g.step_dataset_list(test) + g.step_show(test) + g.step_list2(test) + g.step_list(test) + g.step_integration_runtime_regenerate_auth_key(test) + # g.step_integration_runtime_get_connection_info(test) + g.step_integration_runtime_sync_credentials(test) + g.step_integration_runtime_get_monitoring_data(test) + g.step_integration_runtime_list_auth_key(test) + g.step_integration_runtime_remove_link(test) + g.step_integration_runtime_get_status(test) + # g.step_integration_runtime_start(test) + # g.step_integration_runtime_stop(test) + # g.step_integrationruntimes_createlinkedintegrationruntime(test) + g.step_trigger_get_event_subscription_status(test) + # g.step_activity_run_query_by_pipeline_run(test) + g.step_trigger_unsubscribe_from_event(test) + g.step_trigger_subscribe_to_event(test) + g.step_trigger_start(test) + 
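# --- Editor's note (sketch; the factory name below is hypothetical) ----------
# Dropping the unused `rg` argument throughout these scenarios is safe because
# ScenarioTest.cmd() formats each command string with self.kwargs, so
# placeholders like '{rg}' and '{myFactory}' resolve without being threaded
# through every step function. A minimal self-contained illustration:
from azure.cli.testsdk import ScenarioTest, ResourceGroupPreparer

class DatafactoryKwargsDemo(ScenarioTest):

    @ResourceGroupPreparer(name_prefix='clitest', key='rg')
    def test_show_factory(self, resource_group):
        # The preparer created `resource_group` and stored it in kwargs['rg'];
        # additional placeholders are registered the same way.
        self.kwargs.update({'myFactory': 'exampleFactoryName'})
        self.cmd('az datafactory show '
                 '--name "{myFactory}" '
                 '--resource-group "{rg}"')
# -----------------------------------------------------------------------------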
g.step_trigger_stop(test) + # g.step_get_git_hub_access_token(test) + g.step_get_data_plane_access(test) + # g.step_pipeline_run_query_by_factory(test) + # g.step_pipeline_run_cancel(test) + step_trigger_run_query_by_factory(test) + g.step_configure_factory_repo(test) + g.step_integration_runtime_delete(test) + g.step_trigger_delete(test) + g.step_pipeline_delete(test) + g.step_dataset_delete(test) + g.step_linked_service_delete(test) + g.step_delete(test) + g.cleanup_main(test) + + +def call_main(test): from datetime import datetime, timedelta now = datetime.utcnow() startTime = now.strftime("%Y-%m-%dT%H:%M:%SZ") @@ -255,6 +292,6 @@ def call_scenario(test, rg): 'myStartTime': startTime, 'myEndTime': endTime }) - call_main_scenario(test, rg) - call_managed_integrationruntime_scenario(test, rg) - call_triggerrun_scenario(test, rg) + call_main_scenario(test) + call_managed_integrationruntime_scenario(test) + call_triggerrun_scenario(test) diff --git a/src/datafactory/azext_datafactory/manual/version.py b/src/datafactory/azext_datafactory/manual/version.py index 8e3e0e73f37..75df532f317 100644 --- a/src/datafactory/azext_datafactory/manual/version.py +++ b/src/datafactory/azext_datafactory/manual/version.py @@ -8,4 +8,4 @@ # regenerated. # -------------------------------------------------------------------------- -VERSION = "0.4.0" +VERSION = "0.5.0" diff --git a/src/datafactory/azext_datafactory/tests/latest/example_steps.py b/src/datafactory/azext_datafactory/tests/latest/example_steps.py index 42222d4e576..0704716920c 100644 --- a/src/datafactory/azext_datafactory/tests/latest/example_steps.py +++ b/src/datafactory/azext_datafactory/tests/latest/example_steps.py @@ -8,15 +8,13 @@ # regenerated. # -------------------------------------------------------------------------- -# pylint: disable=unused-argument - from .. 
import try_manual # EXAMPLE: /Factories/put/Factories_CreateOrUpdate @try_manual -def step_create(test, rg, checks=None): +def step_create(test, checks=None): if checks is None: checks = [] test.cmd('az datafactory create ' @@ -28,7 +26,7 @@ def step_create(test, rg, checks=None): # EXAMPLE: /Factories/get/Factories_Get @try_manual -def step_show(test, rg, checks=None): +def step_show(test, checks=None): if checks is None: checks = [] test.cmd('az datafactory show ' @@ -39,7 +37,7 @@ def step_show(test, rg, checks=None): # EXAMPLE: /Factories/get/Factories_List @try_manual -def step_list(test, rg, checks=None): +def step_list(test, checks=None): if checks is None: checks = [] test.cmd('az datafactory list ' @@ -49,7 +47,7 @@ def step_list(test, rg, checks=None): # EXAMPLE: /Factories/get/Factories_ListByResourceGroup @try_manual -def step_list2(test, rg, checks=None): +def step_list2(test, checks=None): if checks is None: checks = [] test.cmd('az datafactory list ' @@ -59,7 +57,7 @@ def step_list2(test, rg, checks=None): # EXAMPLE: /Factories/patch/Factories_Update @try_manual -def step_update(test, rg, checks=None): +def step_update(test, checks=None): if checks is None: checks = [] test.cmd('az datafactory update ' @@ -71,7 +69,7 @@ def step_update(test, rg, checks=None): # EXAMPLE: /Factories/post/Factories_ConfigureFactoryRepo @try_manual -def step_configure_factory_repo(test, rg, checks=None): +def step_configure_factory_repo(test, checks=None): if checks is None: checks = [] test.cmd('az datafactory configure-factory-repo ' @@ -85,7 +83,7 @@ def step_configure_factory_repo(test, rg, checks=None): # EXAMPLE: /Factories/post/Factories_GetDataPlaneAccess @try_manual -def step_get_data_plane_access(test, rg, checks=None): +def step_get_data_plane_access(test, checks=None): if checks is None: checks = [] test.cmd('az datafactory get-data-plane-access ' @@ -101,7 +99,7 @@ def step_get_data_plane_access(test, rg, checks=None): # EXAMPLE: /Factories/post/Factories_GetGitHubAccessToken @try_manual -def step_get_git_hub_access_token(test, rg, checks=None): +def step_get_git_hub_access_token(test, checks=None): if checks is None: checks = [] test.cmd('az datafactory get-git-hub-access-token ' @@ -115,7 +113,7 @@ def step_get_git_hub_access_token(test, rg, checks=None): # EXAMPLE: /ActivityRuns/post/ActivityRuns_QueryByPipelineRun @try_manual -def step_activity_run_query_by_pipeline_run(test, rg, checks=None): +def step_activity_run_query_by_pipeline_run(test, checks=None): if checks is None: checks = [] test.cmd('az datafactory activity-run query-by-pipeline-run ' @@ -129,7 +127,7 @@ def step_activity_run_query_by_pipeline_run(test, rg, checks=None): # EXAMPLE: /Datasets/put/Datasets_Create @try_manual -def step_dataset_create(test, rg, checks=None): +def step_dataset_create(test, checks=None): if checks is None: checks = [] test.cmd('az datafactory dataset create ' @@ -144,26 +142,9 @@ def step_dataset_create(test, rg, checks=None): checks=checks) -# EXAMPLE: /Datasets/put/Datasets_Update -@try_manual -def step_dataset_update(test, rg, checks=None): - if checks is None: - checks = [] - test.cmd('az datafactory dataset update ' - '--description "Example description" ' - '--linked-service-name "{{\\"type\\":\\"LinkedServiceReference\\",\\"referenceName\\":\\"{myLinkedService}' - '\\"}}" ' - '--parameters "{{\\"MyFileName\\":{{\\"type\\":\\"String\\"}},\\"MyFolderPath\\":{{\\"type\\":\\"String\\"' - '}}}}" ' - '--name "{myDataset}" ' - '--factory-name "{myFactory}" ' - '--resource-group "{rg}"', 
- checks=checks) - - # EXAMPLE: /Datasets/get/Datasets_Get @try_manual -def step_dataset_show(test, rg, checks=None): +def step_dataset_show(test, checks=None): if checks is None: checks = [] test.cmd('az datafactory dataset show ' @@ -175,7 +156,7 @@ def step_dataset_show(test, rg, checks=None): # EXAMPLE: /Datasets/get/Datasets_ListByFactory @try_manual -def step_dataset_list(test, rg, checks=None): +def step_dataset_list(test, checks=None): if checks is None: checks = [] test.cmd('az datafactory dataset list ' @@ -186,7 +167,7 @@ def step_dataset_list(test, rg, checks=None): # EXAMPLE: /Datasets/delete/Datasets_Delete @try_manual -def step_dataset_delete(test, rg, checks=None): +def step_dataset_delete(test, checks=None): if checks is None: checks = [] test.cmd('az datafactory dataset delete -y ' @@ -198,7 +179,7 @@ def step_dataset_delete(test, rg, checks=None): # EXAMPLE: /IntegrationRuntimes/put/IntegrationRuntimes_Create @try_manual -def step_integration_runtime_self_hosted_create(test, rg, checks=None): +def step_integration_runtime_self_hosted_create(test, checks=None): if checks is None: checks = [] test.cmd('az datafactory integration-runtime self-hosted create ' @@ -211,7 +192,7 @@ def step_integration_runtime_self_hosted_create(test, rg, checks=None): # EXAMPLE: /IntegrationRuntimes/get/IntegrationRuntimes_Get @try_manual -def step_integration_runtime_show(test, rg, checks=None): +def step_integration_runtime_show(test, checks=None): if checks is None: checks = [] test.cmd('az datafactory integration-runtime show ' @@ -223,7 +204,7 @@ def step_integration_runtime_show(test, rg, checks=None): # EXAMPLE: /IntegrationRuntimes/get/IntegrationRuntimes_ListByFactory @try_manual -def step_integration_runtime_list(test, rg, checks=None): +def step_integration_runtime_list(test, checks=None): if checks is None: checks = [] test.cmd('az datafactory integration-runtime list ' @@ -234,7 +215,7 @@ def step_integration_runtime_list(test, rg, checks=None): # EXAMPLE: /IntegrationRuntimes/patch/IntegrationRuntimes_Update @try_manual -def step_integration_runtime_update(test, rg, checks=None): +def step_integration_runtime_update(test, checks=None): if checks is None: checks = [] test.cmd('az datafactory integration-runtime update ' @@ -248,7 +229,7 @@ def step_integration_runtime_update(test, rg, checks=None): # EXAMPLE: /IntegrationRuntimes/post/IntegrationRuntimes_CreateLinkedIntegrationRuntime @try_manual -def step_integration_runtime_linked(test, rg, checks=None): +def step_integration_runtime_linked(test, checks=None): if checks is None: checks = [] test.cmd('az datafactory integration-runtime linked-integration-runtime create ' @@ -264,7 +245,7 @@ def step_integration_runtime_linked(test, rg, checks=None): # EXAMPLE: /IntegrationRuntimes/post/IntegrationRuntimes_GetConnectionInfo @try_manual -def step_integration_runtime_get_connection_info(test, rg, checks=None): +def step_integration_runtime_get_connection_info(test, checks=None): if checks is None: checks = [] test.cmd('az datafactory integration-runtime get-connection-info ' @@ -276,7 +257,7 @@ def step_integration_runtime_get_connection_info(test, rg, checks=None): # EXAMPLE: /IntegrationRuntimes/post/IntegrationRuntimes_GetMonitoringData @try_manual -def step_integration_runtime_get_monitoring_data(test, rg, checks=None): +def step_integration_runtime_get_monitoring_data(test, checks=None): if checks is None: checks = [] test.cmd('az datafactory integration-runtime get-monitoring-data ' @@ -288,7 +269,7 @@ def 
step_integration_runtime_get_monitoring_data(test, rg, checks=None): # EXAMPLE: /IntegrationRuntimes/post/IntegrationRuntimes_GetStatus @try_manual -def step_integration_runtime_get_status(test, rg, checks=None): +def step_integration_runtime_get_status(test, checks=None): if checks is None: checks = [] test.cmd('az datafactory integration-runtime get-status ' @@ -300,7 +281,7 @@ def step_integration_runtime_get_status(test, rg, checks=None): # EXAMPLE: /IntegrationRuntimes/post/IntegrationRuntimes_ListAuthKeys @try_manual -def step_integration_runtime_list_auth_key(test, rg, checks=None): +def step_integration_runtime_list_auth_key(test, checks=None): if checks is None: checks = [] test.cmd('az datafactory integration-runtime list-auth-key ' @@ -312,7 +293,7 @@ def step_integration_runtime_list_auth_key(test, rg, checks=None): # EXAMPLE: /IntegrationRuntimes/post/IntegrationRuntimes_RegenerateAuthKey @try_manual -def step_integration_runtime_regenerate_auth_key(test, rg, checks=None): +def step_integration_runtime_regenerate_auth_key(test, checks=None): if checks is None: checks = [] test.cmd('az datafactory integration-runtime regenerate-auth-key ' @@ -325,7 +306,7 @@ def step_integration_runtime_regenerate_auth_key(test, rg, checks=None): # EXAMPLE: /IntegrationRuntimes/post/IntegrationRuntimes_Start @try_manual -def step_integration_runtime_start(test, rg, checks=None): +def step_integration_runtime_start(test, checks=None): if checks is None: checks = [] test.cmd('az datafactory integration-runtime start ' @@ -337,7 +318,7 @@ def step_integration_runtime_start(test, rg, checks=None): # EXAMPLE: /IntegrationRuntimes/post/IntegrationRuntimes_Stop @try_manual -def step_integration_runtime_stop(test, rg, checks=None): +def step_integration_runtime_stop(test, checks=None): if checks is None: checks = [] test.cmd('az datafactory integration-runtime stop ' @@ -349,7 +330,7 @@ def step_integration_runtime_stop(test, rg, checks=None): # EXAMPLE: /IntegrationRuntimes/post/IntegrationRuntimes_SyncCredentials @try_manual -def step_integration_runtime_sync_credentials(test, rg, checks=None): +def step_integration_runtime_sync_credentials(test, checks=None): if checks is None: checks = [] test.cmd('az datafactory integration-runtime sync-credentials ' @@ -361,7 +342,7 @@ def step_integration_runtime_sync_credentials(test, rg, checks=None): # EXAMPLE: /IntegrationRuntimes/post/IntegrationRuntimes_Upgrade @try_manual -def step_integration_runtime_remove_link(test, rg, checks=None): +def step_integration_runtime_remove_link(test, checks=None): if checks is None: checks = [] test.cmd('az datafactory integration-runtime remove-link ' @@ -374,7 +355,7 @@ def step_integration_runtime_remove_link(test, rg, checks=None): # EXAMPLE: /IntegrationRuntimeNodes/get/IntegrationRuntimeNodes_Get @try_manual -def step_integration_runtime_node_show(test, rg, checks=None): +def step_integration_runtime_node_show(test, checks=None): if checks is None: checks = [] test.cmd('az datafactory integration-runtime-node show ' @@ -387,7 +368,7 @@ def step_integration_runtime_node_show(test, rg, checks=None): # EXAMPLE: /IntegrationRuntimeNodes/patch/IntegrationRuntimeNodes_Update @try_manual -def step_integration_runtime_node_update(test, rg, checks=None): +def step_integration_runtime_node_update(test, checks=None): if checks is None: checks = [] test.cmd('az datafactory integration-runtime-node update ' @@ -401,7 +382,7 @@ def step_integration_runtime_node_update(test, rg, checks=None): # EXAMPLE: 
/IntegrationRuntimeNodes/post/IntegrationRuntimeNodes_GetIpAddress @try_manual -def step_integration_runtime_node_get_ip_address(test, rg, checks=None): +def step_integration_runtime_node_get_ip_address(test, checks=None): if checks is None: checks = [] test.cmd('az datafactory integration-runtime-node get-ip-address ' @@ -414,7 +395,7 @@ def step_integration_runtime_node_get_ip_address(test, rg, checks=None): # EXAMPLE: /IntegrationRuntimeNodes/delete/IntegrationRuntimesNodes_Delete @try_manual -def step_integration_runtime_node_delete(test, rg, checks=None): +def step_integration_runtime_node_delete(test, checks=None): if checks is None: checks = [] test.cmd('az datafactory integration-runtime-node delete -y ' @@ -427,7 +408,7 @@ def step_integration_runtime_node_delete(test, rg, checks=None): # EXAMPLE: /IntegrationRuntimes/delete/IntegrationRuntimes_Delete @try_manual -def step_integration_runtime_delete(test, rg, checks=None): +def step_integration_runtime_delete(test, checks=None): if checks is None: checks = [] test.cmd('az datafactory integration-runtime delete -y ' @@ -439,7 +420,7 @@ def step_integration_runtime_delete(test, rg, checks=None): # EXAMPLE: /LinkedServices/put/LinkedServices_Create @try_manual -def step_linked_service_create(test, rg, checks=None): +def step_linked_service_create(test, checks=None): if checks is None: checks = [] test.cmd('az datafactory linked-service create ' @@ -452,57 +433,133 @@ def step_linked_service_create(test, rg, checks=None): checks=checks) -# EXAMPLE: /LinkedServices/put/LinkedServices_Update +# EXAMPLE: /LinkedServices/get/LinkedServices_Get @try_manual -def step_linked_service_update(test, rg, checks=None): +def step_linked_service_show(test, checks=None): if checks is None: checks = [] - test.cmd('az datafactory linked-service update ' + test.cmd('az datafactory linked-service show ' '--factory-name "{myFactory}" ' - '--description "Example description" ' '--name "{myLinkedService}" ' '--resource-group "{rg}"', checks=checks) -# EXAMPLE: /LinkedServices/get/LinkedServices_Get +# EXAMPLE: /LinkedServices/get/LinkedServices_ListByFactory @try_manual -def step_linked_service_show(test, rg, checks=None): +def step_linked_service_list(test, checks=None): if checks is None: checks = [] - test.cmd('az datafactory linked-service show ' + test.cmd('az datafactory linked-service list ' + '--factory-name "{myFactory}" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /LinkedServices/delete/LinkedServices_Delete +@try_manual +def step_linked_service_delete(test, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory linked-service delete -y ' '--factory-name "{myFactory}" ' '--name "{myLinkedService}" ' '--resource-group "{rg}"', checks=checks) -# EXAMPLE: /LinkedServices/get/LinkedServices_ListByFactory +# EXAMPLE: /ManagedVirtualNetworks/put/ManagedVirtualNetworks_Create @try_manual -def step_linked_service_list(test, rg, checks=None): +def step_managed_virtual_network_create(test, checks=None): if checks is None: checks = [] - test.cmd('az datafactory linked-service list ' + test.cmd('az datafactory managed-virtual-network create ' '--factory-name "{myFactory}" ' + '--name "{myManagedVirtualNetwork}" ' '--resource-group "{rg}"', checks=checks) -# EXAMPLE: /LinkedServices/delete/LinkedServices_Delete +# EXAMPLE: /ManagedVirtualNetworks/get/ManagedVirtualNetworks_Get @try_manual -def step_linked_service_delete(test, rg, checks=None): +def step_managed_virtual_network_show(test, checks=None): if checks is None: 
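# --- Editor's note (illustrative only) ---------------------------------------
# Every step above takes `checks=None` and immediately replaces it with a
# fresh list. That sidesteps Python's shared-mutable-default pitfall, where a
# single default list would be reused across all calls of the function:
def bad_step(checks=[]):        # one list object shared by every call
    checks.append('check')
    return checks

def good_step(checks=None):     # fresh list per call
    if checks is None:
        checks = []
    checks.append('check')
    return checks

assert bad_step() == ['check'] and bad_step() == ['check', 'check']  # leaks
assert good_step() == ['check'] and good_step() == ['check']         # clean
# -----------------------------------------------------------------------------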
checks = [] - test.cmd('az datafactory linked-service delete -y ' + test.cmd('az datafactory managed-virtual-network show ' + '--factory-name "{myFactory}" ' + '--name "{myManagedVirtualNetwork}" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /ManagedVirtualNetworks/get/ManagedVirtualNetworks_ListByFactory +@try_manual +def step_managed_virtual_network_list(test, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory managed-virtual-network list ' '--factory-name "{myFactory}" ' - '--name "{myLinkedService}" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /ManagedPrivateEndpoints/put/ManagedPrivateEndpoints_Create +@try_manual +def step_managed_private_endpoint_create(test, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory managed-private-endpoint create ' + '--factory-name "{myFactory}" ' + '--group-id "blob" ' + '--private-link-resource-id "/subscriptions/{subscription_id}/resourceGroups/{rg}/providers/Microsoft.Stor' + 'age/storageAccounts/{sa}" ' + '--name "{myManagedPrivateEndpoint}" ' + '--managed-virtual-network-name "{myManagedVirtualNetwork}" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /ManagedPrivateEndpoints/get/ManagedPrivateEndpoints_Get +@try_manual +def step_managed_private_endpoint_show(test, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory managed-private-endpoint show ' + '--factory-name "{myFactory}" ' + '--name "{myManagedPrivateEndpoint}" ' + '--managed-virtual-network-name "{myManagedVirtualNetwork}" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /ManagedPrivateEndpoints/get/ManagedPrivateEndpoints_ListByFactory +@try_manual +def step_managed_private_endpoint_list(test, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory managed-private-endpoint list ' + '--factory-name "{myFactory}" ' + '--managed-virtual-network-name "{myManagedVirtualNetwork}" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /ManagedPrivateEndpoints/delete/ManagedPrivateEndpoints_Delete +@try_manual +def step_managed_private_endpoint_delete(test, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory managed-private-endpoint delete -y ' + '--factory-name "{myFactory}" ' + '--name "{myManagedPrivateEndpoint}" ' + '--managed-virtual-network-name "{myManagedVirtualNetwork}" ' '--resource-group "{rg}"', checks=checks) # EXAMPLE: /PipelineRuns/get/PipelineRuns_Get @try_manual -def step_pipeline_run_show(test, rg, checks=None): +def step_pipeline_run_show(test, checks=None): if checks is None: checks = [] test.cmd('az datafactory pipeline-run show ' @@ -514,7 +571,7 @@ def step_pipeline_run_show(test, rg, checks=None): # EXAMPLE: /PipelineRuns/post/PipelineRuns_Cancel @try_manual -def step_pipeline_run_cancel(test, rg, checks=None): +def step_pipeline_run_cancel(test, checks=None): if checks is None: checks = [] test.cmd('az datafactory pipeline-run cancel ' @@ -526,7 +583,7 @@ def step_pipeline_run_cancel(test, rg, checks=None): # EXAMPLE: /PipelineRuns/post/PipelineRuns_QueryByFactory @try_manual -def step_pipeline_run_query_by_factory(test, rg, checks=None): +def step_pipeline_run_query_by_factory(test, checks=None): if checks is None: checks = [] test.cmd('az datafactory pipeline-run query-by-factory ' @@ -540,7 +597,7 @@ def step_pipeline_run_query_by_factory(test, rg, checks=None): # EXAMPLE: /Pipelines/put/Pipelines_Create @try_manual -def step_pipeline_create(test, rg, checks=None): +def 
step_pipeline_create(test, checks=None): if checks is None: checks = [] test.cmd('az datafactory pipeline create ' @@ -564,7 +621,7 @@ def step_pipeline_create(test, rg, checks=None): # EXAMPLE: /Pipelines/put/Pipelines_Update @try_manual -def step_pipeline_update(test, rg, checks=None): +def step_pipeline_update(test, checks=None): if checks is None: checks = [] test.cmd('az datafactory pipeline update ' @@ -588,7 +645,7 @@ def step_pipeline_update(test, rg, checks=None): # EXAMPLE: /Pipelines/get/Pipelines_Get @try_manual -def step_pipeline_show(test, rg, checks=None): +def step_pipeline_show(test, checks=None): if checks is None: checks = [] test.cmd('az datafactory pipeline show ' @@ -600,7 +657,7 @@ def step_pipeline_show(test, rg, checks=None): # EXAMPLE: /Pipelines/get/Pipelines_ListByFactory @try_manual -def step_pipeline_list(test, rg, checks=None): +def step_pipeline_list(test, checks=None): if checks is None: checks = [] test.cmd('az datafactory pipeline list ' @@ -611,7 +668,7 @@ def step_pipeline_list(test, rg, checks=None): # EXAMPLE: /Pipelines/post/Pipelines_CreateRun @try_manual -def step_pipeline_create_run(test, rg, checks=None): +def step_pipeline_create_run(test, checks=None): if checks is None: checks = [] test.cmd('az datafactory pipeline create-run ' @@ -624,7 +681,7 @@ def step_pipeline_create_run(test, rg, checks=None): # EXAMPLE: /Pipelines/delete/Pipelines_Delete @try_manual -def step_pipeline_delete(test, rg, checks=None): +def step_pipeline_delete(test, checks=None): if checks is None: checks = [] test.cmd('az datafactory pipeline delete -y ' @@ -636,7 +693,7 @@ def step_pipeline_delete(test, rg, checks=None): # EXAMPLE: /Triggers/put/Triggers_Create @try_manual -def step_trigger_create(test, rg, checks=None): +def step_trigger_create(test, checks=None): if checks is None: checks = [] test.cmd('az datafactory trigger create ' @@ -651,22 +708,9 @@ def step_trigger_create(test, rg, checks=None): checks=checks) -# EXAMPLE: /Triggers/put/Triggers_Update -@try_manual -def step_trigger_update(test, rg, checks=None): - if checks is None: - checks = [] - test.cmd('az datafactory trigger update ' - '--factory-name "{myFactory}" ' - '--resource-group "{rg}" ' - '--description "Example description" ' - '--name "{myTrigger}"', - checks=checks) - - # EXAMPLE: /Triggers/get/Triggers_Get @try_manual -def step_trigger_show(test, rg, checks=None): +def step_trigger_show(test, checks=None): if checks is None: checks = [] test.cmd('az datafactory trigger show ' @@ -678,7 +722,7 @@ def step_trigger_show(test, rg, checks=None): # EXAMPLE: /Triggers/get/Triggers_ListByFactory @try_manual -def step_trigger_list(test, rg, checks=None): +def step_trigger_list(test, checks=None): if checks is None: checks = [] test.cmd('az datafactory trigger list ' @@ -689,7 +733,7 @@ def step_trigger_list(test, rg, checks=None): # EXAMPLE: /Triggers/post/Triggers_GetEventSubscriptionStatus @try_manual -def step_trigger_get_event_subscription_status(test, rg, checks=None): +def step_trigger_get_event_subscription_status(test, checks=None): if checks is None: checks = [] test.cmd('az datafactory trigger get-event-subscription-status ' @@ -701,7 +745,7 @@ def step_trigger_get_event_subscription_status(test, rg, checks=None): # EXAMPLE: /Triggers/post/Triggers_QueryByFactory @try_manual -def step_trigger_query_by_factory(test, rg, checks=None): +def step_trigger_query_by_factory(test, checks=None): if checks is None: checks = [] test.cmd('az datafactory trigger query-by-factory ' @@ -713,7 +757,7 @@ def 
step_trigger_query_by_factory(test, rg, checks=None): # EXAMPLE: /Triggers/post/Triggers_Start @try_manual -def step_trigger_start(test, rg, checks=None): +def step_trigger_start(test, checks=None): if checks is None: checks = [] test.cmd('az datafactory trigger start ' @@ -725,7 +769,7 @@ def step_trigger_start(test, rg, checks=None): # EXAMPLE: /Triggers/post/Triggers_Stop @try_manual -def step_trigger_stop(test, rg, checks=None): +def step_trigger_stop(test, checks=None): if checks is None: checks = [] test.cmd('az datafactory trigger stop ' @@ -737,7 +781,7 @@ def step_trigger_stop(test, rg, checks=None): # EXAMPLE: /Triggers/post/Triggers_SubscribeToEvents @try_manual -def step_trigger_subscribe_to_event(test, rg, checks=None): +def step_trigger_subscribe_to_event(test, checks=None): if checks is None: checks = [] test.cmd('az datafactory trigger subscribe-to-event ' @@ -749,7 +793,7 @@ def step_trigger_subscribe_to_event(test, rg, checks=None): # EXAMPLE: /Triggers/post/Triggers_UnsubscribeFromEvents @try_manual -def step_trigger_unsubscribe_from_event(test, rg, checks=None): +def step_trigger_unsubscribe_from_event(test, checks=None): if checks is None: checks = [] test.cmd('az datafactory trigger unsubscribe-from-event ' @@ -761,7 +805,7 @@ def step_trigger_unsubscribe_from_event(test, rg, checks=None): # EXAMPLE: /TriggerRuns/post/TriggerRuns_QueryByFactory @try_manual -def step_trigger_run_query_by_factory(test, rg, checks=None): +def step_trigger_run_query_by_factory(test, checks=None): if checks is None: checks = [] test.cmd('az datafactory trigger-run query-by-factory ' @@ -775,7 +819,7 @@ def step_trigger_run_query_by_factory(test, rg, checks=None): # EXAMPLE: /TriggerRuns/post/Triggers_Cancel @try_manual -def step_trigger_run_cancel(test, rg, checks=None): +def step_trigger_run_cancel(test, checks=None): if checks is None: checks = [] test.cmd('az datafactory trigger-run cancel ' @@ -788,7 +832,7 @@ def step_trigger_run_cancel(test, rg, checks=None): # EXAMPLE: /TriggerRuns/post/Triggers_Rerun @try_manual -def step_trigger_run_rerun(test, rg, checks=None): +def step_trigger_run_rerun(test, checks=None): if checks is None: checks = [] test.cmd('az datafactory trigger-run rerun ' @@ -801,7 +845,7 @@ def step_trigger_run_rerun(test, rg, checks=None): # EXAMPLE: /Triggers/delete/Triggers_Delete @try_manual -def step_trigger_delete(test, rg, checks=None): +def step_trigger_delete(test, checks=None): if checks is None: checks = [] test.cmd('az datafactory trigger delete -y ' @@ -813,7 +857,7 @@ def step_trigger_delete(test, rg, checks=None): # EXAMPLE: /Factories/delete/Factories_Delete @try_manual -def step_delete(test, rg, checks=None): +def step_delete(test, checks=None): if checks is None: checks = [] test.cmd('az datafactory delete -y ' diff --git a/src/datafactory/azext_datafactory/tests/latest/recordings/test_datafactory_Scenario.yaml b/src/datafactory/azext_datafactory/tests/latest/recordings/test_datafactory_main.yaml similarity index 100% rename from src/datafactory/azext_datafactory/tests/latest/recordings/test_datafactory_Scenario.yaml rename to src/datafactory/azext_datafactory/tests/latest/recordings/test_datafactory_main.yaml diff --git a/src/datafactory/azext_datafactory/tests/latest/recordings/test_datafactory_managedPrivateEndpoint.yaml b/src/datafactory/azext_datafactory/tests/latest/recordings/test_datafactory_managedPrivateEndpoint.yaml new file mode 100644 index 00000000000..f6596ff4246 --- /dev/null +++ 
b/src/datafactory/azext_datafactory/tests/latest/recordings/test_datafactory_managedPrivateEndpoint.yaml @@ -0,0 +1,536 @@ +interactions: +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - account list + Connection: + - keep-alive + ParameterSetName: + - --query -o + User-Agent: + - AZURECLI/2.27.0 azsdk-python-azure-mgmt-resource/18.0.0 Python/3.7.9 (Windows-10-10.0.19041-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/locations?api-version=2019-11-01 + response: + body: + string: "{\"value\":[{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus\",\"name\":\"eastus\",\"displayName\":\"East + US\",\"regionalDisplayName\":\"(US) East US\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"US\",\"longitude\":\"-79.8164\",\"latitude\":\"37.3719\",\"physicalLocation\":\"Virginia\",\"pairedRegion\":[{\"name\":\"westus\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2\",\"name\":\"eastus2\",\"displayName\":\"East + US 2\",\"regionalDisplayName\":\"(US) East US 2\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"US\",\"longitude\":\"-78.3889\",\"latitude\":\"36.6681\",\"physicalLocation\":\"Virginia\",\"pairedRegion\":[{\"name\":\"centralus\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralus\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralus\",\"name\":\"southcentralus\",\"displayName\":\"South + Central US\",\"regionalDisplayName\":\"(US) South Central US\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"US\",\"longitude\":\"-98.5\",\"latitude\":\"29.4167\",\"physicalLocation\":\"Texas\",\"pairedRegion\":[{\"name\":\"northcentralus\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northcentralus\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus2\",\"name\":\"westus2\",\"displayName\":\"West + US 2\",\"regionalDisplayName\":\"(US) West US 2\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"US\",\"longitude\":\"-119.852\",\"latitude\":\"47.233\",\"physicalLocation\":\"Washington\",\"pairedRegion\":[{\"name\":\"westcentralus\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westcentralus\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus3\",\"name\":\"westus3\",\"displayName\":\"West + US 3\",\"regionalDisplayName\":\"(US) West US 3\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"US\",\"longitude\":\"-112.074036\",\"latitude\":\"33.448376\",\"physicalLocation\":\"Phoenix\",\"pairedRegion\":[{\"name\":\"eastus\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiaeast\",\"name\":\"australiaeast\",\"displayName\":\"Australia + East\",\"regionalDisplayName\":\"(Asia Pacific) Australia East\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Asia + Pacific\",\"longitude\":\"151.2094\",\"latitude\":\"-33.86\",\"physicalLocation\":\"New + South 
Wales\",\"pairedRegion\":[{\"name\":\"australiasoutheast\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiasoutheast\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southeastasia\",\"name\":\"southeastasia\",\"displayName\":\"Southeast + Asia\",\"regionalDisplayName\":\"(Asia Pacific) Southeast Asia\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Asia + Pacific\",\"longitude\":\"103.833\",\"latitude\":\"1.283\",\"physicalLocation\":\"Singapore\",\"pairedRegion\":[{\"name\":\"eastasia\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastasia\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northeurope\",\"name\":\"northeurope\",\"displayName\":\"North + Europe\",\"regionalDisplayName\":\"(Europe) North Europe\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Europe\",\"longitude\":\"-6.2597\",\"latitude\":\"53.3478\",\"physicalLocation\":\"Ireland\",\"pairedRegion\":[{\"name\":\"westeurope\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westeurope\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/swedencentral\",\"name\":\"swedencentral\",\"displayName\":\"Sweden + Central\",\"regionalDisplayName\":\"(Europe) Sweden Central\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Europe\",\"longitude\":\"17.14127\",\"latitude\":\"60.67488\",\"physicalLocation\":\"G\xE4vle\",\"pairedRegion\":[{\"name\":\"swedensouth\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/swedensouth\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uksouth\",\"name\":\"uksouth\",\"displayName\":\"UK + South\",\"regionalDisplayName\":\"(Europe) UK South\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Europe\",\"longitude\":\"-0.799\",\"latitude\":\"50.941\",\"physicalLocation\":\"London\",\"pairedRegion\":[{\"name\":\"ukwest\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/ukwest\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westeurope\",\"name\":\"westeurope\",\"displayName\":\"West + Europe\",\"regionalDisplayName\":\"(Europe) West Europe\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Europe\",\"longitude\":\"4.9\",\"latitude\":\"52.3667\",\"physicalLocation\":\"Netherlands\",\"pairedRegion\":[{\"name\":\"northeurope\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northeurope\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralus\",\"name\":\"centralus\",\"displayName\":\"Central + US\",\"regionalDisplayName\":\"(US) Central US\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"US\",\"longitude\":\"-93.6208\",\"latitude\":\"41.5908\",\"physicalLocation\":\"Iowa\",\"pairedRegion\":[{\"name\":\"eastus2\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northcentralus\",\"name\":\"northcentralus\",\"displayName\":\"North + Central US\",\"regionalDisplayName\":\"(US) North Central 
US\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"US\",\"longitude\":\"-87.6278\",\"latitude\":\"41.8819\",\"physicalLocation\":\"Illinois\",\"pairedRegion\":[{\"name\":\"southcentralus\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralus\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus\",\"name\":\"westus\",\"displayName\":\"West + US\",\"regionalDisplayName\":\"(US) West US\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"US\",\"longitude\":\"-122.417\",\"latitude\":\"37.783\",\"physicalLocation\":\"California\",\"pairedRegion\":[{\"name\":\"eastus\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafricanorth\",\"name\":\"southafricanorth\",\"displayName\":\"South + Africa North\",\"regionalDisplayName\":\"(Africa) South Africa North\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Africa\",\"longitude\":\"28.218370\",\"latitude\":\"-25.731340\",\"physicalLocation\":\"Johannesburg\",\"pairedRegion\":[{\"name\":\"southafricawest\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafricawest\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralindia\",\"name\":\"centralindia\",\"displayName\":\"Central + India\",\"regionalDisplayName\":\"(Asia Pacific) Central India\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Asia + Pacific\",\"longitude\":\"73.9197\",\"latitude\":\"18.5822\",\"physicalLocation\":\"Pune\",\"pairedRegion\":[{\"name\":\"southindia\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southindia\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastasia\",\"name\":\"eastasia\",\"displayName\":\"East + Asia\",\"regionalDisplayName\":\"(Asia Pacific) East Asia\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Asia + Pacific\",\"longitude\":\"114.188\",\"latitude\":\"22.267\",\"physicalLocation\":\"Hong + Kong\",\"pairedRegion\":[{\"name\":\"southeastasia\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southeastasia\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japaneast\",\"name\":\"japaneast\",\"displayName\":\"Japan + East\",\"regionalDisplayName\":\"(Asia Pacific) Japan East\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Asia + Pacific\",\"longitude\":\"139.77\",\"latitude\":\"35.68\",\"physicalLocation\":\"Tokyo, + Saitama\",\"pairedRegion\":[{\"name\":\"japanwest\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japanwest\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/jioindiawest\",\"name\":\"jioindiawest\",\"displayName\":\"Jio + India West\",\"regionalDisplayName\":\"(Asia Pacific) Jio India West\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Asia + 
Pacific\",\"longitude\":\"70.05773\",\"latitude\":\"22.470701\",\"physicalLocation\":\"Jamnagar\",\"pairedRegion\":[{\"name\":\"jioindiacentral\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/jioindiacentral\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/koreacentral\",\"name\":\"koreacentral\",\"displayName\":\"Korea + Central\",\"regionalDisplayName\":\"(Asia Pacific) Korea Central\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Asia + Pacific\",\"longitude\":\"126.9780\",\"latitude\":\"37.5665\",\"physicalLocation\":\"Seoul\",\"pairedRegion\":[{\"name\":\"koreasouth\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/koreasouth\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canadacentral\",\"name\":\"canadacentral\",\"displayName\":\"Canada + Central\",\"regionalDisplayName\":\"(Canada) Canada Central\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Canada\",\"longitude\":\"-79.383\",\"latitude\":\"43.653\",\"physicalLocation\":\"Toronto\",\"pairedRegion\":[{\"name\":\"canadaeast\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canadaeast\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/francecentral\",\"name\":\"francecentral\",\"displayName\":\"France + Central\",\"regionalDisplayName\":\"(Europe) France Central\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Europe\",\"longitude\":\"2.3730\",\"latitude\":\"46.3772\",\"physicalLocation\":\"Paris\",\"pairedRegion\":[{\"name\":\"francesouth\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/francesouth\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/germanywestcentral\",\"name\":\"germanywestcentral\",\"displayName\":\"Germany + West Central\",\"regionalDisplayName\":\"(Europe) Germany West Central\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Europe\",\"longitude\":\"8.682127\",\"latitude\":\"50.110924\",\"physicalLocation\":\"Frankfurt\",\"pairedRegion\":[{\"name\":\"germanynorth\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/germanynorth\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/norwayeast\",\"name\":\"norwayeast\",\"displayName\":\"Norway + East\",\"regionalDisplayName\":\"(Europe) Norway East\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Europe\",\"longitude\":\"10.752245\",\"latitude\":\"59.913868\",\"physicalLocation\":\"Norway\",\"pairedRegion\":[{\"name\":\"norwaywest\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/norwaywest\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/switzerlandnorth\",\"name\":\"switzerlandnorth\",\"displayName\":\"Switzerland + North\",\"regionalDisplayName\":\"(Europe) Switzerland 
North\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Europe\",\"longitude\":\"8.564572\",\"latitude\":\"47.451542\",\"physicalLocation\":\"Zurich\",\"pairedRegion\":[{\"name\":\"switzerlandwest\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/switzerlandwest\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uaenorth\",\"name\":\"uaenorth\",\"displayName\":\"UAE + North\",\"regionalDisplayName\":\"(Middle East) UAE North\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"Middle + East\",\"longitude\":\"55.316666\",\"latitude\":\"25.266666\",\"physicalLocation\":\"Dubai\",\"pairedRegion\":[{\"name\":\"uaecentral\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uaecentral\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/brazilsouth\",\"name\":\"brazilsouth\",\"displayName\":\"Brazil + South\",\"regionalDisplayName\":\"(South America) Brazil South\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Recommended\",\"geographyGroup\":\"South + America\",\"longitude\":\"-46.633\",\"latitude\":\"-23.55\",\"physicalLocation\":\"Sao + Paulo State\",\"pairedRegion\":[{\"name\":\"southcentralus\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralus\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralusstage\",\"name\":\"centralusstage\",\"displayName\":\"Central + US (Stage)\",\"regionalDisplayName\":\"(US) Central US (Stage)\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastusstage\",\"name\":\"eastusstage\",\"displayName\":\"East + US (Stage)\",\"regionalDisplayName\":\"(US) East US (Stage)\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2stage\",\"name\":\"eastus2stage\",\"displayName\":\"East + US 2 (Stage)\",\"regionalDisplayName\":\"(US) East US 2 (Stage)\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northcentralusstage\",\"name\":\"northcentralusstage\",\"displayName\":\"North + Central US (Stage)\",\"regionalDisplayName\":\"(US) North Central US (Stage)\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralusstage\",\"name\":\"southcentralusstage\",\"displayName\":\"South + Central US (Stage)\",\"regionalDisplayName\":\"(US) South Central US (Stage)\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westusstage\",\"name\":\"westusstage\",\"displayName\":\"West + US (Stage)\",\"regionalDisplayName\":\"(US) West US (Stage)\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus2stage\",\"name\":\"westus2stage\",\"displayName\":\"West + US 2 (Stage)\",\"regionalDisplayName\":\"(US) West US 2 
(Stage)\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/asia\",\"name\":\"asia\",\"displayName\":\"Asia\",\"regionalDisplayName\":\"Asia\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/asiapacific\",\"name\":\"asiapacific\",\"displayName\":\"Asia + Pacific\",\"regionalDisplayName\":\"Asia Pacific\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australia\",\"name\":\"australia\",\"displayName\":\"Australia\",\"regionalDisplayName\":\"Australia\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/brazil\",\"name\":\"brazil\",\"displayName\":\"Brazil\",\"regionalDisplayName\":\"Brazil\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canada\",\"name\":\"canada\",\"displayName\":\"Canada\",\"regionalDisplayName\":\"Canada\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/europe\",\"name\":\"europe\",\"displayName\":\"Europe\",\"regionalDisplayName\":\"Europe\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/global\",\"name\":\"global\",\"displayName\":\"Global\",\"regionalDisplayName\":\"Global\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/india\",\"name\":\"india\",\"displayName\":\"India\",\"regionalDisplayName\":\"India\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japan\",\"name\":\"japan\",\"displayName\":\"Japan\",\"regionalDisplayName\":\"Japan\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uk\",\"name\":\"uk\",\"displayName\":\"United + Kingdom\",\"regionalDisplayName\":\"United Kingdom\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/unitedstates\",\"name\":\"unitedstates\",\"displayName\":\"United + States\",\"regionalDisplayName\":\"United States\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastasiastage\",\"name\":\"eastasiastage\",\"displayName\":\"East + Asia (Stage)\",\"regionalDisplayName\":\"(Asia Pacific) East Asia (Stage)\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Asia + Pacific\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southeastasiastage\",\"name\":\"southeastasiastage\",\"displayName\":\"Southeast + Asia (Stage)\",\"regionalDisplayName\":\"(Asia Pacific) Southeast Asia (Stage)\",\"metadata\":{\"regionType\":\"Logical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Asia + 
Pacific\"}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centraluseuap\",\"name\":\"centraluseuap\",\"displayName\":\"Central + US EUAP\",\"regionalDisplayName\":\"(US) Central US EUAP\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\",\"longitude\":\"-93.6208\",\"latitude\":\"41.5908\",\"pairedRegion\":[{\"name\":\"eastus2euap\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2euap\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2euap\",\"name\":\"eastus2euap\",\"displayName\":\"East + US 2 EUAP\",\"regionalDisplayName\":\"(US) East US 2 EUAP\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\",\"longitude\":\"-78.3889\",\"latitude\":\"36.6681\",\"pairedRegion\":[{\"name\":\"centraluseuap\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centraluseuap\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westcentralus\",\"name\":\"westcentralus\",\"displayName\":\"West + Central US\",\"regionalDisplayName\":\"(US) West Central US\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"US\",\"longitude\":\"-110.234\",\"latitude\":\"40.890\",\"physicalLocation\":\"Wyoming\",\"pairedRegion\":[{\"name\":\"westus2\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus2\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafricawest\",\"name\":\"southafricawest\",\"displayName\":\"South + Africa West\",\"regionalDisplayName\":\"(Africa) South Africa West\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Africa\",\"longitude\":\"18.843266\",\"latitude\":\"-34.075691\",\"physicalLocation\":\"Cape + Town\",\"pairedRegion\":[{\"name\":\"southafricanorth\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafricanorth\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiacentral\",\"name\":\"australiacentral\",\"displayName\":\"Australia + Central\",\"regionalDisplayName\":\"(Asia Pacific) Australia Central\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Asia + Pacific\",\"longitude\":\"149.1244\",\"latitude\":\"-35.3075\",\"physicalLocation\":\"Canberra\",\"pairedRegion\":[{\"name\":\"australiacentral\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiacentral\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiacentral2\",\"name\":\"australiacentral2\",\"displayName\":\"Australia + Central 2\",\"regionalDisplayName\":\"(Asia Pacific) Australia Central 2\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Asia + Pacific\",\"longitude\":\"149.1244\",\"latitude\":\"-35.3075\",\"physicalLocation\":\"Canberra\",\"pairedRegion\":[{\"name\":\"australiacentral2\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiacentral2\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiasoutheast\",\"name\":\"australiasoutheast\",\"displayName\":\"Australia + Southeast\",\"regionalDisplayName\":\"(Asia Pacific) Australia Southeast\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Asia + 
Pacific\",\"longitude\":\"144.9631\",\"latitude\":\"-37.8136\",\"physicalLocation\":\"Victoria\",\"pairedRegion\":[{\"name\":\"australiaeast\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiaeast\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japanwest\",\"name\":\"japanwest\",\"displayName\":\"Japan + West\",\"regionalDisplayName\":\"(Asia Pacific) Japan West\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Asia + Pacific\",\"longitude\":\"135.5022\",\"latitude\":\"34.6939\",\"physicalLocation\":\"Osaka\",\"pairedRegion\":[{\"name\":\"japaneast\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japaneast\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/jioindiacentral\",\"name\":\"jioindiacentral\",\"displayName\":\"Jio + India Central\",\"regionalDisplayName\":\"(Asia Pacific) Jio India Central\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Asia + Pacific\",\"longitude\":\"79.08886\",\"latitude\":\"21.146633\",\"physicalLocation\":\"Nagpur\",\"pairedRegion\":[{\"name\":\"jioindiawest\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/jioindiawest\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/koreasouth\",\"name\":\"koreasouth\",\"displayName\":\"Korea + South\",\"regionalDisplayName\":\"(Asia Pacific) Korea South\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Asia + Pacific\",\"longitude\":\"129.0756\",\"latitude\":\"35.1796\",\"physicalLocation\":\"Busan\",\"pairedRegion\":[{\"name\":\"koreacentral\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/koreacentral\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southindia\",\"name\":\"southindia\",\"displayName\":\"South + India\",\"regionalDisplayName\":\"(Asia Pacific) South India\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Asia + Pacific\",\"longitude\":\"80.1636\",\"latitude\":\"12.9822\",\"physicalLocation\":\"Chennai\",\"pairedRegion\":[{\"name\":\"centralindia\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralindia\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westindia\",\"name\":\"westindia\",\"displayName\":\"West + India\",\"regionalDisplayName\":\"(Asia Pacific) West India\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Asia + Pacific\",\"longitude\":\"72.868\",\"latitude\":\"19.088\",\"physicalLocation\":\"Mumbai\",\"pairedRegion\":[{\"name\":\"southindia\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southindia\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canadaeast\",\"name\":\"canadaeast\",\"displayName\":\"Canada + East\",\"regionalDisplayName\":\"(Canada) Canada East\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Canada\",\"longitude\":\"-71.217\",\"latitude\":\"46.817\",\"physicalLocation\":\"Quebec\",\"pairedRegion\":[{\"name\":\"canadacentral\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canadacentral\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/francesouth\",\"name\":\"francesouth\",\"displayName\":\"France + 
South\",\"regionalDisplayName\":\"(Europe) France South\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Europe\",\"longitude\":\"2.1972\",\"latitude\":\"43.8345\",\"physicalLocation\":\"Marseille\",\"pairedRegion\":[{\"name\":\"francecentral\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/francecentral\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/germanynorth\",\"name\":\"germanynorth\",\"displayName\":\"Germany + North\",\"regionalDisplayName\":\"(Europe) Germany North\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Europe\",\"longitude\":\"8.806422\",\"latitude\":\"53.073635\",\"physicalLocation\":\"Berlin\",\"pairedRegion\":[{\"name\":\"germanywestcentral\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/germanywestcentral\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/norwaywest\",\"name\":\"norwaywest\",\"displayName\":\"Norway + West\",\"regionalDisplayName\":\"(Europe) Norway West\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Europe\",\"longitude\":\"5.733107\",\"latitude\":\"58.969975\",\"physicalLocation\":\"Norway\",\"pairedRegion\":[{\"name\":\"norwayeast\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/norwayeast\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/swedensouth\",\"name\":\"swedensouth\",\"displayName\":\"Sweden + South\",\"regionalDisplayName\":\"(Europe) Sweden South\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Europe\",\"longitude\":\"13.0007\",\"latitude\":\"55.6059\",\"physicalLocation\":\"Malmo\",\"pairedRegion\":[{\"name\":\"swedencentral\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/swedencentral\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/switzerlandwest\",\"name\":\"switzerlandwest\",\"displayName\":\"Switzerland + West\",\"regionalDisplayName\":\"(Europe) Switzerland West\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Europe\",\"longitude\":\"6.143158\",\"latitude\":\"46.204391\",\"physicalLocation\":\"Geneva\",\"pairedRegion\":[{\"name\":\"switzerlandnorth\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/switzerlandnorth\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/ukwest\",\"name\":\"ukwest\",\"displayName\":\"UK + West\",\"regionalDisplayName\":\"(Europe) UK West\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Europe\",\"longitude\":\"-3.084\",\"latitude\":\"53.427\",\"physicalLocation\":\"Cardiff\",\"pairedRegion\":[{\"name\":\"uksouth\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uksouth\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uaecentral\",\"name\":\"uaecentral\",\"displayName\":\"UAE + Central\",\"regionalDisplayName\":\"(Middle East) UAE Central\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Middle + East\",\"longitude\":\"54.366669\",\"latitude\":\"24.466667\",\"physicalLocation\":\"Abu + 
Dhabi\",\"pairedRegion\":[{\"name\":\"uaenorth\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uaenorth\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/brazilsoutheast\",\"name\":\"brazilsoutheast\",\"displayName\":\"Brazil + Southeast\",\"regionalDisplayName\":\"(South America) Brazil Southeast\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"South + America\",\"longitude\":\"-43.2075\",\"latitude\":\"-22.90278\",\"physicalLocation\":\"Rio\",\"pairedRegion\":[{\"name\":\"brazilsouth\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/brazilsouth\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastusslv\",\"name\":\"eastusslv\",\"displayName\":\"East + US SLV\",\"regionalDisplayName\":\"(South America) East US SLV\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"South + America\",\"longitude\":\"-43.2075\",\"latitude\":\"-22.90278\",\"physicalLocation\":\"Silverstone\",\"pairedRegion\":[{\"name\":\"eastusslv\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastusslv\"}]}},{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/qatarcentral\",\"name\":\"qatarcentral\",\"displayName\":\"Qatar + Central\",\"regionalDisplayName\":\"(Europe) Qatar Central\",\"metadata\":{\"regionType\":\"Physical\",\"regionCategory\":\"Other\",\"geographyGroup\":\"Europe\",\"longitude\":\"51.439327\",\"latitude\":\"25.551462\",\"physicalLocation\":\"Doha\",\"pairedRegion\":[{\"name\":\"westeurope\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westeurope\"}]}}]}" + headers: + cache-control: + - no-cache + content-length: + - '28399' + content-type: + - application/json; charset=utf-8 + date: + - Mon, 16 Aug 2021 07:27:17 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: '{"location": "eastus", "identity": {"type": "SystemAssigned"}, "properties": + {"encryption": {}}}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory create + Connection: + - keep-alive + Content-Length: + - '96' + Content-Type: + - application/json + ParameterSetName: + - --location --name --resource-group + User-Agent: + - AZURECLI/2.27.0 azsdk-python-mgmt-datafactory/1.0.0 Python/3.7.9 (Windows-10-10.0.19041-SP0) + method: PUT + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000004/providers/Microsoft.DataFactory/factories/exampleFa000001?api-version=2018-06-01 + response: + body: + string: '{"name":"exampleFa000001","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000004/providers/Microsoft.DataFactory/factories/examplefaag7y7rdu5","type":"Microsoft.DataFactory/factories","properties":{"provisioningState":"Succeeded","createTime":"2021-08-16T07:27:31.2087066Z","version":"2018-06-01","encryption":{}},"eTag":"\"08004aba-0000-0100-0000-611a13630000\"","location":"eastus","identity":{"type":"SystemAssigned","principalId":"e059b2b7-5d2c-44f5-81c2-3662b3cbdeb4","tenantId":"54826b22-38d6-4fb2-bad9-b7b93a3e9c5a"},"tags":{}}' + headers: + cache-control: + - no-cache + content-length: + - '631' + content-type: + - application/json; charset=utf-8 + date: + - Mon, 
16 Aug 2021 07:27:33 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-writes: + - '1199' + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: '{"properties": {}}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory managed-virtual-network create + Connection: + - keep-alive + Content-Length: + - '18' + Content-Type: + - application/json + ParameterSetName: + - --factory-name --name --resource-group + User-Agent: + - AZURECLI/2.27.0 azsdk-python-mgmt-datafactory/1.0.0 Python/3.7.9 (Windows-10-10.0.19041-SP0) + method: PUT + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000004/providers/Microsoft.DataFactory/factories/exampleFa000001/managedVirtualNetworks/exampleManagedVi000002?api-version=2018-06-01 + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000004/providers/Microsoft.DataFactory/factories/exampleFa000001/managedvirtualnetworks/exampleManagedVi000002","name":"exampleManagedVi000002","type":"Microsoft.DataFactory/factories/managedvirtualnetworks","properties":{"vNetId":"3eb3a45b-f6c4-4ab8-887d-88eaf048162b","preventDataExfiltration":false,"alias":"examplefaag7y7rdu5"},"etag":"07001564-0000-0100-0000-611a13680000"}' + headers: + cache-control: + - no-cache + content-length: + - '544' + content-type: + - application/json; charset=utf-8 + date: + - Mon, 16 Aug 2021 07:27:36 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-writes: + - '1199' + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory managed-virtual-network list + Connection: + - keep-alive + ParameterSetName: + - --factory-name --resource-group + User-Agent: + - AZURECLI/2.27.0 azsdk-python-mgmt-datafactory/1.0.0 Python/3.7.9 (Windows-10-10.0.19041-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000004/providers/Microsoft.DataFactory/factories/exampleFa000001/managedVirtualNetworks?api-version=2018-06-01 + response: + body: + string: '{"value":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000004/providers/Microsoft.DataFactory/factories/exampleFa000001/managedvirtualnetworks/exampleManagedVi000002","name":"exampleManagedVi000002","type":"Microsoft.DataFactory/factories/managedvirtualnetworks","properties":{"vNetId":"3eb3a45b-f6c4-4ab8-887d-88eaf048162b","preventDataExfiltration":false,"alias":"examplefaag7y7rdu5"},"etag":"07001564-0000-0100-0000-611a13680000"}]}' + headers: + cache-control: + - no-cache + content-length: + - '556' + content-type: + - application/json; charset=utf-8 + date: + - Mon, 16 Aug 2021 07:27:37 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - 
chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory managed-virtual-network show + Connection: + - keep-alive + ParameterSetName: + - --factory-name --name --resource-group + User-Agent: + - AZURECLI/2.27.0 azsdk-python-mgmt-datafactory/1.0.0 Python/3.7.9 (Windows-10-10.0.19041-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000004/providers/Microsoft.DataFactory/factories/exampleFa000001/managedVirtualNetworks/exampleManagedVi000002?api-version=2018-06-01 + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000004/providers/Microsoft.DataFactory/factories/exampleFa000001/managedvirtualnetworks/exampleManagedVi000002","name":"exampleManagedVi000002","type":"Microsoft.DataFactory/factories/managedvirtualnetworks","properties":{"vNetId":"3eb3a45b-f6c4-4ab8-887d-88eaf048162b","preventDataExfiltration":false,"alias":"examplefaag7y7rdu5"},"etag":"07001564-0000-0100-0000-611a13680000"}' + headers: + cache-control: + - no-cache + content-length: + - '544' + content-type: + - application/json; charset=utf-8 + date: + - Mon, 16 Aug 2021 07:27:38 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK
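The three managed-virtual-network interactions recorded above all go through the new managed_virtual_networks operations group on DataFactoryManagementClient. As a rough sketch of the equivalent direct SDK calls, assuming the signatures of the public azure-mgmt-datafactory 1.0.0 package named in the recorded User-Agent (the vendored copy in this PR is generated from the same 2018-06-01 API and may differ in detail); resource names below are the sanitized values from the recording:

    from azure.identity import DefaultAzureCredential
    from azure.mgmt.datafactory import DataFactoryManagementClient
    from azure.mgmt.datafactory.models import ManagedVirtualNetwork, ManagedVirtualNetworkResource

    # Illustrative only: credential and names are stand-ins, not part of this PR.
    client = DataFactoryManagementClient(
        credential=DefaultAzureCredential(),
        subscription_id="00000000-0000-0000-0000-000000000000")

    # PUT .../managedVirtualNetworks/<name>: the recorded request body is just
    # '{"properties": {}}', i.e. an empty ManagedVirtualNetwork payload.
    client.managed_virtual_networks.create_or_update(
        resource_group_name="clitest000004",
        factory_name="exampleFa000001",
        managed_virtual_network_name="exampleManagedVi000002",
        managed_virtual_network=ManagedVirtualNetworkResource(properties=ManagedVirtualNetwork()))

    # The two recorded GETs: list by factory, then show a single network.
    for network in client.managed_virtual_networks.list_by_factory("clitest000004", "exampleFa000001"):
        print(network.name, network.properties.alias)
    client.managed_virtual_networks.get("clitest000004", "exampleFa000001", "exampleManagedVi000002")
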
+- request: + body: '{"properties": {"groupId": "blob", "privateLinkResourceId": "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000004/providers/Microsoft.Storage/storageAccounts/clitest000005"}}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory managed-private-endpoint create + Connection: + - keep-alive + Content-Length: + - '275' + Content-Type: + - application/json + ParameterSetName: + - --factory-name --group-id --private-link-resource-id --name --managed-virtual-network-name + --resource-group + User-Agent: + - AZURECLI/2.27.0 azsdk-python-mgmt-datafactory/1.0.0 Python/3.7.9 (Windows-10-10.0.19041-SP0) + method: PUT + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000004/providers/Microsoft.DataFactory/factories/exampleFa000001/managedVirtualNetworks/exampleManagedVi000002/managedPrivateEndpoints/exampleManagedPr000003?api-version=2018-06-01 + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000004/providers/Microsoft.DataFactory/factories/exampleFa000001/managedvirtualnetworks/exampleManagedVi000002/managedprivateendpoints/exampleManagedPr000003","name":"exampleManagedPr000003","type":"Microsoft.DataFactory/factories/managedvirtualnetworks/managedprivateendpoints","properties":{"provisioningState":"Provisioning","privateLinkResourceId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000004/providers/Microsoft.Storage/storageAccounts/clitest000005","groupId":"blob","fqdns":[],"connectionState":{"status":"","description":"","actionsRequired":""}}}' + headers: + cache-control: + - no-cache + content-length: + - '843' + content-type: + - application/json; 
charset=utf-8 + date: + - Mon, 16 Aug 2021 07:27:39 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-writes: + - '1198' + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory managed-private-endpoint list + Connection: + - keep-alive + ParameterSetName: + - --factory-name --managed-virtual-network-name --resource-group + User-Agent: + - AZURECLI/2.27.0 azsdk-python-mgmt-datafactory/1.0.0 Python/3.7.9 (Windows-10-10.0.19041-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000004/providers/Microsoft.DataFactory/factories/exampleFa000001/managedVirtualNetworks/exampleManagedVi000002/managedPrivateEndpoints?api-version=2018-06-01 + response: + body: + string: '{"value":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000004/providers/Microsoft.DataFactory/factories/exampleFa000001/managedvirtualnetworks/exampleManagedVi000002/managedprivateendpoints/exampleManagedPr000003","name":"exampleManagedPr000003","type":"Microsoft.DataFactory/factories/managedvirtualnetworks/managedprivateendpoints","properties":{"provisioningState":"Provisioning","privateLinkResourceId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000004/providers/Microsoft.Storage/storageAccounts/clitest000005","groupId":"blob","fqdns":[],"connectionState":{"status":"","description":"","actionsRequired":""}}}]}' + headers: + cache-control: + - no-cache + content-length: + - '855' + content-type: + - application/json; charset=utf-8 + date: + - Mon, 16 Aug 2021 07:27:41 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory managed-private-endpoint show + Connection: + - keep-alive + ParameterSetName: + - --factory-name --name --managed-virtual-network-name --resource-group + User-Agent: + - AZURECLI/2.27.0 azsdk-python-mgmt-datafactory/1.0.0 Python/3.7.9 (Windows-10-10.0.19041-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000004/providers/Microsoft.DataFactory/factories/exampleFa000001/managedVirtualNetworks/exampleManagedVi000002/managedPrivateEndpoints/exampleManagedPr000003?api-version=2018-06-01 + response: + body: + string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000004/providers/Microsoft.DataFactory/factories/exampleFa000001/managedvirtualnetworks/exampleManagedVi000002/managedprivateendpoints/exampleManagedPr000003","name":"exampleManagedPr000003","type":"Microsoft.DataFactory/factories/managedvirtualnetworks/managedprivateendpoints","properties":{"provisioningState":"Provisioning","privateLinkResourceId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000004/providers/Microsoft.Storage/storageAccounts/clitest000005","groupId":"blob","fqdns":[],"connectionState":{"status":"","description":"","actionsRequired":""}}}' + headers: + cache-control: + - no-cache + content-length: + - '843' + content-type: + - application/json; charset=utf-8 + date: + - Mon, 16 Aug 2021 07:27:42 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory delete + Connection: + - keep-alive + Content-Length: + - '0' + ParameterSetName: + - -y --name --resource-group + User-Agent: + - AZURECLI/2.27.0 azsdk-python-mgmt-datafactory/1.0.0 Python/3.7.9 (Windows-10-10.0.19041-SP0) + method: DELETE + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000004/providers/Microsoft.DataFactory/factories/exampleFa000001?api-version=2018-06-01 + response: + body: + string: '' + headers: + cache-control: + - no-cache + content-length: + - '0' + date: + - Mon, 16 Aug 2021 07:27:48 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-deletes: + - '14999' + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +version: 1 diff --git a/src/datafactory/azext_datafactory/tests/latest/test_datafactory_scenario.py b/src/datafactory/azext_datafactory/tests/latest/test_datafactory_scenario.py index 517a35650f8..6bfdf3885a3 100644 --- a/src/datafactory/azext_datafactory/tests/latest/test_datafactory_scenario.py +++ b/src/datafactory/azext_datafactory/tests/latest/test_datafactory_scenario.py @@ -11,16 +11,14 @@ import os from azure.cli.testsdk import ScenarioTest from azure.cli.testsdk import ResourceGroupPreparer +from azure.cli.testsdk import StorageAccountPreparer from .example_steps import step_create from .example_steps import step_update from .example_steps import step_linked_service_create -from .example_steps import step_linked_service_update from .example_steps import step_dataset_create -from .example_steps import step_dataset_update from .example_steps import step_pipeline_create from .example_steps import step_pipeline_update from .example_steps import step_trigger_create -from .example_steps import step_trigger_update from .example_steps import step_integration_runtime_self_hosted_create from .example_steps import step_integration_runtime_update from .example_steps import step_integration_runtime_linked @@ -66,6 +64,12 @@ from .example_steps import step_dataset_delete from .example_steps import step_linked_service_delete from .example_steps import step_delete 
diff --git a/src/datafactory/azext_datafactory/tests/latest/test_datafactory_scenario.py b/src/datafactory/azext_datafactory/tests/latest/test_datafactory_scenario.py index 517a35650f8..6bfdf3885a3 100644 --- a/src/datafactory/azext_datafactory/tests/latest/test_datafactory_scenario.py +++ b/src/datafactory/azext_datafactory/tests/latest/test_datafactory_scenario.py @@ -11,16 +11,14 @@ import os from azure.cli.testsdk import ScenarioTest from azure.cli.testsdk import ResourceGroupPreparer +from azure.cli.testsdk import StorageAccountPreparer from .example_steps import step_create from .example_steps import step_update from .example_steps import step_linked_service_create -from .example_steps import step_linked_service_update from .example_steps import step_dataset_create -from .example_steps import step_dataset_update from .example_steps import step_pipeline_create from .example_steps import step_pipeline_update from .example_steps import step_trigger_create -from .example_steps import step_trigger_update from .example_steps import step_integration_runtime_self_hosted_create from .example_steps import step_integration_runtime_update from .example_steps import step_integration_runtime_linked @@ -66,6 +64,12 @@ from .example_steps import step_dataset_delete from .example_steps import step_linked_service_delete from .example_steps import step_delete 
+from .example_steps import step_managed_virtual_network_create +from .example_steps import step_managed_virtual_network_list +from .example_steps import step_managed_virtual_network_show +from .example_steps import step_managed_private_endpoint_create +from .example_steps import step_managed_private_endpoint_list +from .example_steps import step_managed_private_endpoint_show from .. import ( try_manual, raise_if, @@ -76,92 +80,91 @@ TEST_DIR = os.path.abspath(os.path.join(os.path.abspath(__file__), '..')) -# Env setup_scenario +# Env setup_main @try_manual -def setup_scenario(test, rg): +def setup_main(test): pass -# Env cleanup_scenario +# Env cleanup_main @try_manual -def cleanup_scenario(test, rg): +def cleanup_main(test): pass -# Testcase: Scenario +# Testcase: main @try_manual -def call_scenario(test, rg): - setup_scenario(test, rg) - step_create(test, rg, checks=[]) - step_update(test, rg, checks=[]) - step_linked_service_create(test, rg, checks=[]) - step_linked_service_update(test, rg, checks=[]) - step_dataset_create(test, rg, checks=[]) - step_dataset_update(test, rg, checks=[]) - step_pipeline_create(test, rg, checks=[]) - step_pipeline_update(test, rg, checks=[]) - step_trigger_create(test, rg, checks=[]) - step_trigger_update(test, rg, checks=[]) - step_integration_runtime_self_hosted_create(test, rg, checks=[]) - step_integration_runtime_update(test, rg, checks=[]) - step_integration_runtime_linked(test, rg, checks=[]) - step_pipeline_create_run(test, rg, checks=[]) - step_integration_runtime_show(test, rg, checks=[]) +def call_main(test): + setup_main(test) + step_create(test, checks=[]) + step_update(test, checks=[]) + step_linked_service_create(test, checks=[]) + # STEP NOT FOUND: LinkedServices_Update + step_dataset_create(test, checks=[]) + # STEP NOT FOUND: Datasets_Update + step_pipeline_create(test, checks=[]) + step_pipeline_update(test, checks=[]) + step_trigger_create(test, checks=[]) + # STEP NOT FOUND: Triggers_Update + step_integration_runtime_self_hosted_create(test, checks=[]) + step_integration_runtime_update(test, checks=[]) + step_integration_runtime_linked(test, checks=[]) + step_pipeline_create_run(test, checks=[]) + step_integration_runtime_show(test, checks=[]) # STEP NOT FOUND: RerunTriggers_ListByTrigger - step_linked_service_show(test, rg, checks=[]) - step_pipeline_run_show(test, rg, checks=[]) - step_pipeline_show(test, rg, checks=[]) - step_dataset_show(test, rg, checks=[]) - step_trigger_show(test, rg, checks=[]) - step_integration_runtime_list(test, rg, checks=[]) - step_linked_service_list(test, rg, checks=[]) - step_pipeline_list(test, rg, checks=[]) - step_trigger_list(test, rg, checks=[]) - step_dataset_list(test, rg, checks=[]) - step_show(test, rg, checks=[]) - step_list2(test, rg, checks=[]) - step_list(test, rg, checks=[]) # STEP NOT FOUND: Operations_List # STEP NOT FOUND: RerunTriggers_Cancel # STEP NOT FOUND: RerunTriggers_Start # STEP NOT FOUND: RerunTriggers_Stop - step_integration_runtime_regenerate_auth_key(test, rg, checks=[]) + 
step_integration_runtime_regenerate_auth_key(test, checks=[]) # STEP NOT FOUND: TriggerRuns_Rerun - step_integration_runtime_get_connection_info(test, rg, checks=[]) - step_integration_runtime_sync_credentials(test, rg, checks=[]) - step_integration_runtime_get_monitoring_data(test, rg, checks=[]) - step_integration_runtime_list_auth_key(test, rg, checks=[]) - step_integration_runtime_remove_link(test, rg, checks=[]) - step_integration_runtime_get_status(test, rg, checks=[]) - step_integration_runtime_start(test, rg, checks=[]) - step_integration_runtime_stop(test, rg, checks=[]) - step_trigger_get_event_subscription_status(test, rg, checks=[]) - step_activity_run_query_by_pipeline_run(test, rg, checks=[]) - step_trigger_unsubscribe_from_event(test, rg, checks=[]) - step_trigger_subscribe_to_event(test, rg, checks=[]) - step_trigger_start(test, rg, checks=[]) - step_trigger_stop(test, rg, checks=[]) - step_get_git_hub_access_token(test, rg, checks=[]) - step_get_data_plane_access(test, rg, checks=[]) - step_pipeline_run_query_by_factory(test, rg, checks=[]) - step_pipeline_run_cancel(test, rg, checks=[]) - step_trigger_run_query_by_factory(test, rg, checks=[]) - step_configure_factory_repo(test, rg, checks=[]) - step_integration_runtime_delete(test, rg, checks=[]) - step_trigger_delete(test, rg, checks=[]) - step_pipeline_delete(test, rg, checks=[]) - step_dataset_delete(test, rg, checks=[]) - step_linked_service_delete(test, rg, checks=[]) - step_delete(test, rg, checks=[]) - cleanup_scenario(test, rg) - - -# Test class for Scenario -@try_manual -class DatafactoryScenarioTest(ScenarioTest): + step_integration_runtime_get_connection_info(test, checks=[]) + step_integration_runtime_sync_credentials(test, checks=[]) + step_integration_runtime_get_monitoring_data(test, checks=[]) + step_integration_runtime_list_auth_key(test, checks=[]) + step_integration_runtime_remove_link(test, checks=[]) + step_integration_runtime_get_status(test, checks=[]) + step_integration_runtime_start(test, checks=[]) + step_integration_runtime_stop(test, checks=[]) + step_trigger_get_event_subscription_status(test, checks=[]) + step_activity_run_query_by_pipeline_run(test, checks=[]) + step_trigger_unsubscribe_from_event(test, checks=[]) + step_trigger_subscribe_to_event(test, checks=[]) + step_trigger_start(test, checks=[]) + step_trigger_stop(test, checks=[]) + step_get_git_hub_access_token(test, checks=[]) + step_get_data_plane_access(test, checks=[]) + step_pipeline_run_query_by_factory(test, checks=[]) + step_pipeline_run_cancel(test, checks=[]) + step_trigger_run_query_by_factory(test, checks=[]) + step_configure_factory_repo(test, checks=[]) + step_integration_runtime_delete(test, checks=[]) + step_trigger_delete(test, checks=[]) + step_pipeline_delete(test, checks=[]) + step_dataset_delete(test, checks=[]) + step_linked_service_delete(test, checks=[]) + step_delete(test, checks=[]) + cleanup_main(test) + +# Test class for main +@try_manual +class DatafactorymainTest(ScenarioTest): def __init__(self, *args, **kwargs): - super(DatafactoryScenarioTest, self).__init__(*args, **kwargs) + super(DatafactorymainTest, self).__init__(*args, **kwargs) self.kwargs.update({ 'subscription_id': self.get_subscription_id() }) @@ -177,7 +180,59 @@ def __init__(self, *args, **kwargs): }) @ResourceGroupPreparer(name_prefix='clitestdatafactory_exampleResourceGroup'[:7], key='rg', parameter_name='rg') - def test_datafactory_Scenario(self, rg): - call_scenario(self, rg) + def test_datafactory_main(self, rg): + call_main(self) + 
calc_coverage(__file__) + raise_if() + +# Env setup_managedprivateendpoint +@try_manual +def setup_managedprivateendpoint(test): + pass + + +# Env cleanup_managedprivateendpoint +@try_manual +def cleanup_managedprivateendpoint(test): + pass + + +# Testcase: managedPrivateEndpoint +@try_manual +def call_managedprivateendpoint(test): + setup_managedprivateendpoint(test) + step_create(test, checks=[]) + step_managed_virtual_network_create(test, checks=[]) + step_managed_virtual_network_list(test, checks=[]) + step_managed_virtual_network_show(test, checks=[]) + step_managed_private_endpoint_create(test, checks=[]) + step_managed_private_endpoint_list(test, checks=[]) + step_managed_private_endpoint_show(test, checks=[]) + step_delete(test, checks=[]) + cleanup_managedprivateendpoint(test) + + +# Test class for managedPrivateEndpoint +@try_manual +class DatafactorymanagedPrivateEndpointTest(ScenarioTest): + def __init__(self, *args, **kwargs): + super(DatafactorymanagedPrivateEndpointTest, self).__init__(*args, **kwargs) + self.kwargs.update({ + 'subscription_id': self.get_subscription_id() + }) + + self.kwargs.update({ + 'myFactory': self.create_random_name(prefix='exampleFactoryName'[:9], length=18), + 'myManagedVirtualNetwork': self.create_random_name(prefix='exampleManagedVirtualNetworkName'[:16], + length=32), + 'myManagedPrivateEndpoint': self.create_random_name(prefix='exampleManagedPrivateEndpointName'[:16], + length=33), + }) + + @ResourceGroupPreparer(name_prefix='clitestdatafactory_exampleResourceGroup'[:7], key='rg', parameter_name='rg') + @StorageAccountPreparer(name_prefix='clitestdatafactory_exampleBlobStorage'[:7], key='sa', + resource_group_parameter_name='rg') + def test_datafactory_managedPrivateEndpoint(self, rg): + call_managedprivateendpoint(self) calc_coverage(__file__) raise_if() diff --git a/src/datafactory/azext_datafactory/tests/latest/test_datafactory_scenario_coverage.md b/src/datafactory/azext_datafactory/tests/latest/test_datafactory_scenario_coverage.md index b7eabe4528a..ca7eec23d45 100644 --- a/src/datafactory/azext_datafactory/tests/latest/test_datafactory_scenario_coverage.md +++ b/src/datafactory/azext_datafactory/tests/latest/test_datafactory_scenario_coverage.md @@ -1,48 +1,10 @@ |Scenario|Result|ErrorMessage|ErrorStack|ErrorNormalized|StartDt|EndDt| -|step_create|successed||||2021-04-26 09:05:32.308913|2021-04-26 09:05:32.501033| -|step_update|successed||||2021-04-26 09:05:22.750754|2021-04-26 09:05:22.880707| -|step_linked_service_create|successed||||2021-04-26 09:05:22.880707|2021-04-26 09:05:23.009706| -|step_linked_service_update|successed||||2021-04-26 09:05:23.010706|2021-04-26 09:05:23.174579| -|step_dataset_create|successed||||2021-04-26 09:05:23.174579|2021-04-26 09:05:23.317043| -|step_dataset_update|successed||||2021-04-26 09:05:23.318045|2021-04-26 09:05:23.451047| -|step_pipeline_create|successed||||2021-04-26 09:05:23.452049|2021-04-26 09:05:23.575751| -|step_trigger_create|successed||||2021-04-26 09:05:23.703756|2021-04-26 09:05:23.871057| -|step_trigger_update|successed||||2021-04-26 09:05:23.871057|2021-04-26 09:05:24.019053| -|step_integration_runtime_self_hosted_create|successed||||2021-04-26 09:05:24.019053|2021-04-26 09:05:24.155099| -|step_integration_runtime_update|successed||||2021-04-26 09:05:24.155099|2021-04-26 09:05:24.285096| -|step_integration_runtime_show|successed||||2021-04-26 09:05:29.524820|2021-04-26 09:05:29.675815| -|step_linked_service_show|successed||||2021-04-26 09:05:24.582291|2021-04-26 09:05:24.718292| 
-|step_pipeline_show|successed||||2021-04-26 09:05:24.719291|2021-04-26 09:05:24.872517| -|step_dataset_show|successed||||2021-04-26 09:05:24.873517|2021-04-26 09:05:25.000030| -|step_trigger_show|successed||||2021-04-26 09:05:33.782136|2021-04-26 09:05:33.927138| -|step_integration_runtime_list|successed||||2021-04-26 09:05:25.115003|2021-04-26 09:05:25.253055| -|step_linked_service_list|successed||||2021-04-26 09:05:25.254059|2021-04-26 09:05:25.409635| -|step_pipeline_list|successed||||2021-04-26 09:05:25.409635|2021-04-26 09:05:25.533704| -|step_trigger_list|successed||||2021-04-26 09:05:25.533704|2021-04-26 09:05:25.676865| -|step_dataset_list|successed||||2021-04-26 09:05:25.676865|2021-04-26 09:05:25.810871| -|step_show|successed||||2021-04-26 09:05:25.810871|2021-04-26 09:05:25.938042| -|step_list2|successed||||2021-04-26 09:05:25.938042|2021-04-26 09:05:26.060042| -|step_list|successed||||2021-04-26 09:05:26.060042|2021-04-26 09:05:26.183196| -|step_integration_runtime_regenerate_auth_key|successed||||2021-04-26 09:05:26.184194|2021-04-26 09:05:26.313194| -|step_integration_runtime_sync_credentials|successed||||2021-04-26 09:05:26.314192|2021-04-26 09:05:26.449307| -|step_integration_runtime_get_monitoring_data|successed||||2021-04-26 09:05:26.449307|2021-04-26 09:05:26.636000| -|step_integration_runtime_list_auth_key|successed||||2021-04-26 09:05:26.636000|2021-04-26 09:05:26.790002| -|step_integration_runtime_remove_link|successed||||2021-04-26 09:05:26.791005|2021-04-26 09:05:26.934513| -|step_integration_runtime_get_status|successed||||2021-04-26 09:05:26.935512|2021-04-26 09:05:27.069511| -|step_trigger_get_event_subscription_status|successed||||2021-04-26 09:05:27.069511|2021-04-26 09:05:27.211487| -|step_trigger_unsubscribe_from_event|successed||||2021-04-26 09:05:27.212492|2021-04-26 09:05:27.402802| -|step_trigger_subscribe_to_event|successed||||2021-04-26 09:05:27.402802|2021-04-26 09:05:27.532807| -|step_trigger_start|successed||||2021-04-26 09:05:33.632612|2021-04-26 09:05:33.782136| -|step_trigger_stop|successed||||2021-04-26 09:05:34.611518|2021-04-26 09:05:34.768873| -|step_get_data_plane_access|successed||||2021-04-26 09:05:27.837090|2021-04-26 09:05:27.977072| -|step_configure_factory_repo|successed||||2021-04-26 09:05:28.099075|2021-04-26 09:05:28.288426| -|step_integration_runtime_delete|successed||||2021-04-26 09:05:31.965947|2021-04-26 09:05:32.140944| -|step_trigger_delete|successed||||2021-04-26 09:05:34.768873|2021-04-26 09:05:34.900878| -|step_pipeline_delete|successed||||2021-04-26 09:05:34.900878|2021-04-26 09:05:35.030991| -|step_dataset_delete|successed||||2021-04-26 09:05:28.737334|2021-04-26 09:05:28.861337| -|step_linked_service_delete|successed||||2021-04-26 09:05:28.861337|2021-04-26 09:05:28.989612| -|step_delete|successed||||2021-04-26 09:05:35.031990|2021-04-26 09:05:35.197507| -|step_integration_runtime_start|successed||||2021-04-26 09:05:29.676815|2021-04-26 09:05:30.373119| -|step_integration_runtime_stop|successed||||2021-04-26 09:05:30.374118|2021-04-26 09:05:31.964925| -|step_activity_run_query_by_pipeline_run|successed||||2021-04-26 09:05:33.012581|2021-04-26 09:05:33.193579| -Coverage: 46/46 +|step_create|successed||||2021-08-16 07:27:16.493725|2021-08-16 07:27:34.207925| +|step_managed_virtual_network_create|successed||||2021-08-16 07:27:34.207925|2021-08-16 07:27:36.767592| +|step_managed_virtual_network_list|successed||||2021-08-16 07:27:36.767592|2021-08-16 07:27:37.485625| +|step_managed_virtual_network_show|successed||||2021-08-16 
07:27:37.486610|2021-08-16 07:27:38.936934| +|step_managed_private_endpoint_create|successed||||2021-08-16 07:27:38.936934|2021-08-16 07:27:40.441373| +|step_managed_private_endpoint_list|successed||||2021-08-16 07:27:40.441373|2021-08-16 07:27:41.849929| +|step_managed_private_endpoint_show|successed||||2021-08-16 07:27:41.849929|2021-08-16 07:27:43.250730| +|step_delete|successed||||2021-08-16 07:27:43.250730|2021-08-16 07:27:48.809653| +Coverage: 8/8 diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/__init__.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/__init__.py index df905149155..0363a016fcc 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/__init__.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/__init__.py @@ -7,6 +7,9 @@ # -------------------------------------------------------------------------- from ._data_factory_management_client import DataFactoryManagementClient +from ._version import VERSION + +__version__ = VERSION __all__ = ['DataFactoryManagementClient'] try: diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_configuration.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_configuration.py index 3e3cbab9738..79ca686fbd5 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_configuration.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_configuration.py @@ -12,13 +12,14 @@ from azure.core.pipeline import policies from azure.mgmt.core.policies import ARMHttpLoggingPolicy +from ._version import VERSION + if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports from typing import Any from azure.core.credentials import TokenCredential -VERSION = "unknown" class DataFactoryManagementClientConfiguration(Configuration): """Configuration for DataFactoryManagementClient. @@ -49,7 +50,7 @@ def __init__( self.subscription_id = subscription_id self.api_version = "2018-06-01" self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default']) - kwargs.setdefault('sdk_moniker', 'datafactorymanagementclient/{}'.format(VERSION)) + kwargs.setdefault('sdk_moniker', 'mgmt-datafactory/{}'.format(VERSION)) self._configure(**kwargs) def _configure( diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_data_factory_management_client.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_data_factory_management_client.py index f272437a3e9..afb3f8d6f74 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_data_factory_management_client.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_data_factory_management_client.py @@ -45,45 +45,45 @@ class DataFactoryManagementClient(object): """The Azure Data Factory V2 management API provides a RESTful set of web services that interact with Azure Data Factory V2 services. 
:ivar operations: Operations operations - :vartype operations: data_factory_management_client.operations.Operations + :vartype operations: azure.mgmt.datafactory.operations.Operations :ivar factories: FactoriesOperations operations - :vartype factories: data_factory_management_client.operations.FactoriesOperations + :vartype factories: azure.mgmt.datafactory.operations.FactoriesOperations :ivar exposure_control: ExposureControlOperations operations - :vartype exposure_control: data_factory_management_client.operations.ExposureControlOperations + :vartype exposure_control: azure.mgmt.datafactory.operations.ExposureControlOperations :ivar integration_runtimes: IntegrationRuntimesOperations operations - :vartype integration_runtimes: data_factory_management_client.operations.IntegrationRuntimesOperations + :vartype integration_runtimes: azure.mgmt.datafactory.operations.IntegrationRuntimesOperations :ivar integration_runtime_object_metadata: IntegrationRuntimeObjectMetadataOperations operations - :vartype integration_runtime_object_metadata: data_factory_management_client.operations.IntegrationRuntimeObjectMetadataOperations + :vartype integration_runtime_object_metadata: azure.mgmt.datafactory.operations.IntegrationRuntimeObjectMetadataOperations :ivar integration_runtime_nodes: IntegrationRuntimeNodesOperations operations - :vartype integration_runtime_nodes: data_factory_management_client.operations.IntegrationRuntimeNodesOperations + :vartype integration_runtime_nodes: azure.mgmt.datafactory.operations.IntegrationRuntimeNodesOperations :ivar linked_services: LinkedServicesOperations operations - :vartype linked_services: data_factory_management_client.operations.LinkedServicesOperations + :vartype linked_services: azure.mgmt.datafactory.operations.LinkedServicesOperations :ivar datasets: DatasetsOperations operations - :vartype datasets: data_factory_management_client.operations.DatasetsOperations + :vartype datasets: azure.mgmt.datafactory.operations.DatasetsOperations :ivar pipelines: PipelinesOperations operations - :vartype pipelines: data_factory_management_client.operations.PipelinesOperations + :vartype pipelines: azure.mgmt.datafactory.operations.PipelinesOperations :ivar pipeline_runs: PipelineRunsOperations operations - :vartype pipeline_runs: data_factory_management_client.operations.PipelineRunsOperations + :vartype pipeline_runs: azure.mgmt.datafactory.operations.PipelineRunsOperations :ivar activity_runs: ActivityRunsOperations operations - :vartype activity_runs: data_factory_management_client.operations.ActivityRunsOperations + :vartype activity_runs: azure.mgmt.datafactory.operations.ActivityRunsOperations :ivar triggers: TriggersOperations operations - :vartype triggers: data_factory_management_client.operations.TriggersOperations + :vartype triggers: azure.mgmt.datafactory.operations.TriggersOperations :ivar trigger_runs: TriggerRunsOperations operations - :vartype trigger_runs: data_factory_management_client.operations.TriggerRunsOperations + :vartype trigger_runs: azure.mgmt.datafactory.operations.TriggerRunsOperations :ivar data_flows: DataFlowsOperations operations - :vartype data_flows: data_factory_management_client.operations.DataFlowsOperations + :vartype data_flows: azure.mgmt.datafactory.operations.DataFlowsOperations :ivar data_flow_debug_session: DataFlowDebugSessionOperations operations - :vartype data_flow_debug_session: data_factory_management_client.operations.DataFlowDebugSessionOperations + :vartype data_flow_debug_session: 
azure.mgmt.datafactory.operations.DataFlowDebugSessionOperations :ivar managed_virtual_networks: ManagedVirtualNetworksOperations operations - :vartype managed_virtual_networks: data_factory_management_client.operations.ManagedVirtualNetworksOperations + :vartype managed_virtual_networks: azure.mgmt.datafactory.operations.ManagedVirtualNetworksOperations :ivar managed_private_endpoints: ManagedPrivateEndpointsOperations operations - :vartype managed_private_endpoints: data_factory_management_client.operations.ManagedPrivateEndpointsOperations + :vartype managed_private_endpoints: azure.mgmt.datafactory.operations.ManagedPrivateEndpointsOperations :ivar private_end_point_connections: PrivateEndPointConnectionsOperations operations - :vartype private_end_point_connections: data_factory_management_client.operations.PrivateEndPointConnectionsOperations + :vartype private_end_point_connections: azure.mgmt.datafactory.operations.PrivateEndPointConnectionsOperations :ivar private_endpoint_connection: PrivateEndpointConnectionOperations operations - :vartype private_endpoint_connection: data_factory_management_client.operations.PrivateEndpointConnectionOperations + :vartype private_endpoint_connection: azure.mgmt.datafactory.operations.PrivateEndpointConnectionOperations :ivar private_link_resources: PrivateLinkResourcesOperations operations - :vartype private_link_resources: data_factory_management_client.operations.PrivateLinkResourcesOperations + :vartype private_link_resources: azure.mgmt.datafactory.operations.PrivateLinkResourcesOperations :param credential: Credential needed for the client to connect to Azure. :type credential: ~azure.core.credentials.TokenCredential :param subscription_id: The subscription identifier. diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_version.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_version.py new file mode 100644 index 00000000000..c47f66669f1 --- /dev/null +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_version.py @@ -0,0 +1,9 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +VERSION = "1.0.0" diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_configuration.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_configuration.py index c88a091bdb9..e540bdbfb3f 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_configuration.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_configuration.py @@ -12,11 +12,12 @@ from azure.core.pipeline import policies from azure.mgmt.core.policies import ARMHttpLoggingPolicy +from .._version import VERSION + if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports from azure.core.credentials_async import AsyncTokenCredential -VERSION = "unknown" class DataFactoryManagementClientConfiguration(Configuration): """Configuration for DataFactoryManagementClient. 
@@ -46,7 +47,7 @@ def __init__( self.subscription_id = subscription_id self.api_version = "2018-06-01" self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default']) - kwargs.setdefault('sdk_moniker', 'datafactorymanagementclient/{}'.format(VERSION)) + kwargs.setdefault('sdk_moniker', 'mgmt-datafactory/{}'.format(VERSION)) self._configure(**kwargs) def _configure( diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_configuration_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_configuration_async.py deleted file mode 100644 index 411d6c4a66e..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_configuration_async.py +++ /dev/null @@ -1,67 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- - -from typing import Any, TYPE_CHECKING - -from azure.core.configuration import Configuration -from azure.core.pipeline import policies -from azure.mgmt.core.policies import ARMHttpLoggingPolicy - -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from azure.core.credentials_async import AsyncTokenCredential - -VERSION = "unknown" - -class DataFactoryManagementClientConfiguration(Configuration): - """Configuration for DataFactoryManagementClient. - - Note that all parameters used to create this instance are saved as instance - attributes. - - :param credential: Credential needed for the client to connect to Azure. - :type credential: ~azure.core.credentials_async.AsyncTokenCredential - :param subscription_id: The subscription identifier. 
- :type subscription_id: str - """ - - def __init__( - self, - credential: "AsyncTokenCredential", - subscription_id: str, - **kwargs: Any - ) -> None: - if credential is None: - raise ValueError("Parameter 'credential' must not be None.") - if subscription_id is None: - raise ValueError("Parameter 'subscription_id' must not be None.") - super(DataFactoryManagementClientConfiguration, self).__init__(**kwargs) - - self.credential = credential - self.subscription_id = subscription_id - self.api_version = "2018-06-01" - self.credential_scopes = ['https://management.azure.com/.default'] - self.credential_scopes.extend(kwargs.pop('credential_scopes', [])) - kwargs.setdefault('sdk_moniker', 'datafactorymanagementclient/{}'.format(VERSION)) - self._configure(**kwargs) - - def _configure( - self, - **kwargs: Any - ) -> None: - self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs) - self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs) - self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs) - self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs) - self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs) - self.retry_policy = kwargs.get('retry_policy') or policies.AsyncRetryPolicy(**kwargs) - self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs) - self.redirect_policy = kwargs.get('redirect_policy') or policies.AsyncRedirectPolicy(**kwargs) - self.authentication_policy = kwargs.get('authentication_policy') - if self.credential and not self.authentication_policy: - self.authentication_policy = policies.AsyncBearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs) diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_data_factory_management_client.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_data_factory_management_client.py index 255a1839c21..ce6023b2263 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_data_factory_management_client.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_data_factory_management_client.py @@ -43,45 +43,45 @@ class DataFactoryManagementClient(object): """The Azure Data Factory V2 management API provides a RESTful set of web services that interact with Azure Data Factory V2 services. 
:ivar operations: Operations operations - :vartype operations: data_factory_management_client.aio.operations.Operations + :vartype operations: azure.mgmt.datafactory.aio.operations.Operations :ivar factories: FactoriesOperations operations - :vartype factories: data_factory_management_client.aio.operations.FactoriesOperations + :vartype factories: azure.mgmt.datafactory.aio.operations.FactoriesOperations :ivar exposure_control: ExposureControlOperations operations - :vartype exposure_control: data_factory_management_client.aio.operations.ExposureControlOperations + :vartype exposure_control: azure.mgmt.datafactory.aio.operations.ExposureControlOperations :ivar integration_runtimes: IntegrationRuntimesOperations operations - :vartype integration_runtimes: data_factory_management_client.aio.operations.IntegrationRuntimesOperations + :vartype integration_runtimes: azure.mgmt.datafactory.aio.operations.IntegrationRuntimesOperations :ivar integration_runtime_object_metadata: IntegrationRuntimeObjectMetadataOperations operations - :vartype integration_runtime_object_metadata: data_factory_management_client.aio.operations.IntegrationRuntimeObjectMetadataOperations + :vartype integration_runtime_object_metadata: azure.mgmt.datafactory.aio.operations.IntegrationRuntimeObjectMetadataOperations :ivar integration_runtime_nodes: IntegrationRuntimeNodesOperations operations - :vartype integration_runtime_nodes: data_factory_management_client.aio.operations.IntegrationRuntimeNodesOperations + :vartype integration_runtime_nodes: azure.mgmt.datafactory.aio.operations.IntegrationRuntimeNodesOperations :ivar linked_services: LinkedServicesOperations operations - :vartype linked_services: data_factory_management_client.aio.operations.LinkedServicesOperations + :vartype linked_services: azure.mgmt.datafactory.aio.operations.LinkedServicesOperations :ivar datasets: DatasetsOperations operations - :vartype datasets: data_factory_management_client.aio.operations.DatasetsOperations + :vartype datasets: azure.mgmt.datafactory.aio.operations.DatasetsOperations :ivar pipelines: PipelinesOperations operations - :vartype pipelines: data_factory_management_client.aio.operations.PipelinesOperations + :vartype pipelines: azure.mgmt.datafactory.aio.operations.PipelinesOperations :ivar pipeline_runs: PipelineRunsOperations operations - :vartype pipeline_runs: data_factory_management_client.aio.operations.PipelineRunsOperations + :vartype pipeline_runs: azure.mgmt.datafactory.aio.operations.PipelineRunsOperations :ivar activity_runs: ActivityRunsOperations operations - :vartype activity_runs: data_factory_management_client.aio.operations.ActivityRunsOperations + :vartype activity_runs: azure.mgmt.datafactory.aio.operations.ActivityRunsOperations :ivar triggers: TriggersOperations operations - :vartype triggers: data_factory_management_client.aio.operations.TriggersOperations + :vartype triggers: azure.mgmt.datafactory.aio.operations.TriggersOperations :ivar trigger_runs: TriggerRunsOperations operations - :vartype trigger_runs: data_factory_management_client.aio.operations.TriggerRunsOperations + :vartype trigger_runs: azure.mgmt.datafactory.aio.operations.TriggerRunsOperations :ivar data_flows: DataFlowsOperations operations - :vartype data_flows: data_factory_management_client.aio.operations.DataFlowsOperations + :vartype data_flows: azure.mgmt.datafactory.aio.operations.DataFlowsOperations :ivar data_flow_debug_session: DataFlowDebugSessionOperations operations - :vartype data_flow_debug_session: 
data_factory_management_client.aio.operations.DataFlowDebugSessionOperations + :vartype data_flow_debug_session: azure.mgmt.datafactory.aio.operations.DataFlowDebugSessionOperations :ivar managed_virtual_networks: ManagedVirtualNetworksOperations operations - :vartype managed_virtual_networks: data_factory_management_client.aio.operations.ManagedVirtualNetworksOperations + :vartype managed_virtual_networks: azure.mgmt.datafactory.aio.operations.ManagedVirtualNetworksOperations :ivar managed_private_endpoints: ManagedPrivateEndpointsOperations operations - :vartype managed_private_endpoints: data_factory_management_client.aio.operations.ManagedPrivateEndpointsOperations + :vartype managed_private_endpoints: azure.mgmt.datafactory.aio.operations.ManagedPrivateEndpointsOperations :ivar private_end_point_connections: PrivateEndPointConnectionsOperations operations - :vartype private_end_point_connections: data_factory_management_client.aio.operations.PrivateEndPointConnectionsOperations + :vartype private_end_point_connections: azure.mgmt.datafactory.aio.operations.PrivateEndPointConnectionsOperations :ivar private_endpoint_connection: PrivateEndpointConnectionOperations operations - :vartype private_endpoint_connection: data_factory_management_client.aio.operations.PrivateEndpointConnectionOperations + :vartype private_endpoint_connection: azure.mgmt.datafactory.aio.operations.PrivateEndpointConnectionOperations :ivar private_link_resources: PrivateLinkResourcesOperations operations - :vartype private_link_resources: data_factory_management_client.aio.operations.PrivateLinkResourcesOperations + :vartype private_link_resources: azure.mgmt.datafactory.aio.operations.PrivateLinkResourcesOperations :param credential: Credential needed for the client to connect to Azure. :type credential: ~azure.core.credentials_async.AsyncTokenCredential :param subscription_id: The subscription identifier. diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_data_factory_management_client_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_data_factory_management_client_async.py deleted file mode 100644 index b2b322686b8..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_data_factory_management_client_async.py +++ /dev/null @@ -1,143 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
-# -------------------------------------------------------------------------- - -from typing import Any, Optional, TYPE_CHECKING - -from azure.mgmt.core import AsyncARMPipelineClient -from msrest import Deserializer, Serializer - -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from azure.core.credentials_async import AsyncTokenCredential - -from ._configuration_async import DataFactoryManagementClientConfiguration -from .operations_async import OperationOperations -from .operations_async import FactoryOperations -from .operations_async import ExposureControlOperations -from .operations_async import IntegrationRuntimeOperations -from .operations_async import IntegrationRuntimeObjectMetadataOperations -from .operations_async import IntegrationRuntimeNodeOperations -from .operations_async import LinkedServiceOperations -from .operations_async import DatasetOperations -from .operations_async import PipelineOperations -from .operations_async import PipelineRunOperations -from .operations_async import ActivityRunOperations -from .operations_async import TriggerOperations -from .operations_async import TriggerRunOperations -from .operations_async import DataFlowOperations -from .operations_async import DataFlowDebugSessionOperations -from .operations_async import ManagedVirtualNetworkOperations -from .operations_async import ManagedPrivateEndpointOperations -from .. import models - - -class DataFactoryManagementClient(object): - """The Azure Data Factory V2 management API provides a RESTful set of web services that interact with Azure Data Factory V2 services. - - :ivar operation: OperationOperations operations - :vartype operation: data_factory_management_client.aio.operations_async.OperationOperations - :ivar factory: FactoryOperations operations - :vartype factory: data_factory_management_client.aio.operations_async.FactoryOperations - :ivar exposure_control: ExposureControlOperations operations - :vartype exposure_control: data_factory_management_client.aio.operations_async.ExposureControlOperations - :ivar integration_runtime: IntegrationRuntimeOperations operations - :vartype integration_runtime: data_factory_management_client.aio.operations_async.IntegrationRuntimeOperations - :ivar integration_runtime_object_metadata: IntegrationRuntimeObjectMetadataOperations operations - :vartype integration_runtime_object_metadata: data_factory_management_client.aio.operations_async.IntegrationRuntimeObjectMetadataOperations - :ivar integration_runtime_node: IntegrationRuntimeNodeOperations operations - :vartype integration_runtime_node: data_factory_management_client.aio.operations_async.IntegrationRuntimeNodeOperations - :ivar linked_service: LinkedServiceOperations operations - :vartype linked_service: data_factory_management_client.aio.operations_async.LinkedServiceOperations - :ivar dataset: DatasetOperations operations - :vartype dataset: data_factory_management_client.aio.operations_async.DatasetOperations - :ivar pipeline: PipelineOperations operations - :vartype pipeline: data_factory_management_client.aio.operations_async.PipelineOperations - :ivar pipeline_run: PipelineRunOperations operations - :vartype pipeline_run: data_factory_management_client.aio.operations_async.PipelineRunOperations - :ivar activity_run: ActivityRunOperations operations - :vartype activity_run: data_factory_management_client.aio.operations_async.ActivityRunOperations - :ivar trigger: TriggerOperations operations - :vartype trigger: 
data_factory_management_client.aio.operations_async.TriggerOperations - :ivar trigger_run: TriggerRunOperations operations - :vartype trigger_run: data_factory_management_client.aio.operations_async.TriggerRunOperations - :ivar data_flow: DataFlowOperations operations - :vartype data_flow: data_factory_management_client.aio.operations_async.DataFlowOperations - :ivar data_flow_debug_session: DataFlowDebugSessionOperations operations - :vartype data_flow_debug_session: data_factory_management_client.aio.operations_async.DataFlowDebugSessionOperations - :ivar managed_virtual_network: ManagedVirtualNetworkOperations operations - :vartype managed_virtual_network: data_factory_management_client.aio.operations_async.ManagedVirtualNetworkOperations - :ivar managed_private_endpoint: ManagedPrivateEndpointOperations operations - :vartype managed_private_endpoint: data_factory_management_client.aio.operations_async.ManagedPrivateEndpointOperations - :param credential: Credential needed for the client to connect to Azure. - :type credential: ~azure.core.credentials_async.AsyncTokenCredential - :param subscription_id: The subscription identifier. - :type subscription_id: str - :param str base_url: Service URL - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. - """ - - def __init__( - self, - credential: "AsyncTokenCredential", - subscription_id: str, - base_url: Optional[str] = None, - **kwargs: Any - ) -> None: - if not base_url: - base_url = 'https://management.azure.com' - self._config = DataFactoryManagementClientConfiguration(credential, subscription_id, **kwargs) - self._client = AsyncARMPipelineClient(base_url=base_url, config=self._config, **kwargs) - - client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} - self._serialize = Serializer(client_models) - self._deserialize = Deserializer(client_models) - - self.operation = OperationOperations( - self._client, self._config, self._serialize, self._deserialize) - self.factory = FactoryOperations( - self._client, self._config, self._serialize, self._deserialize) - self.exposure_control = ExposureControlOperations( - self._client, self._config, self._serialize, self._deserialize) - self.integration_runtime = IntegrationRuntimeOperations( - self._client, self._config, self._serialize, self._deserialize) - self.integration_runtime_object_metadata = IntegrationRuntimeObjectMetadataOperations( - self._client, self._config, self._serialize, self._deserialize) - self.integration_runtime_node = IntegrationRuntimeNodeOperations( - self._client, self._config, self._serialize, self._deserialize) - self.linked_service = LinkedServiceOperations( - self._client, self._config, self._serialize, self._deserialize) - self.dataset = DatasetOperations( - self._client, self._config, self._serialize, self._deserialize) - self.pipeline = PipelineOperations( - self._client, self._config, self._serialize, self._deserialize) - self.pipeline_run = PipelineRunOperations( - self._client, self._config, self._serialize, self._deserialize) - self.activity_run = ActivityRunOperations( - self._client, self._config, self._serialize, self._deserialize) - self.trigger = TriggerOperations( - self._client, self._config, self._serialize, self._deserialize) - self.trigger_run = TriggerRunOperations( - self._client, self._config, self._serialize, self._deserialize) - self.data_flow = DataFlowOperations( - self._client, self._config, self._serialize, self._deserialize) - 
self.data_flow_debug_session = DataFlowDebugSessionOperations( - self._client, self._config, self._serialize, self._deserialize) - self.managed_virtual_network = ManagedVirtualNetworkOperations( - self._client, self._config, self._serialize, self._deserialize) - self.managed_private_endpoint = ManagedPrivateEndpointOperations( - self._client, self._config, self._serialize, self._deserialize) - - async def close(self) -> None: - await self._client.close() - - async def __aenter__(self) -> "DataFactoryManagementClient": - await self._client.__aenter__() - return self - - async def __aexit__(self, *exc_details) -> None: - await self._client.__aexit__(*exc_details) diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_activity_runs_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_activity_runs_operations.py index 39382a45d74..4742074a90c 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_activity_runs_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_activity_runs_operations.py @@ -25,7 +25,7 @@ class ActivityRunsOperations: instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models + :type models: ~azure.mgmt.datafactory.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. @@ -57,10 +57,10 @@ async def query_by_pipeline_run( :param run_id: The pipeline run identifier. :type run_id: str :param filter_parameters: Parameters to filter the activity runs. - :type filter_parameters: ~data_factory_management_client.models.RunFilterParameters + :type filter_parameters: ~azure.mgmt.datafactory.models.RunFilterParameters :keyword callable cls: A custom type or function that will be passed the direct response :return: ActivityRunsQueryResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.ActivityRunsQueryResponse + :rtype: ~azure.mgmt.datafactory.models.ActivityRunsQueryResponse :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.ActivityRunsQueryResponse"] diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_data_flow_debug_session_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_data_flow_debug_session_operations.py index dbb85249ab9..40af760f268 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_data_flow_debug_session_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_data_flow_debug_session_operations.py @@ -28,7 +28,7 @@ class DataFlowDebugSessionOperations: instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models + :type models: ~azure.mgmt.datafactory.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. @@ -116,7 +116,7 @@ async def begin_create( :param factory_name: The factory name. :type factory_name: str :param request: Data flow debug session definition. 
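# With the singular *_async modules deleted in favor of the aio client above,
# async usage goes through the pluralized operation groups. A minimal sketch
# (the subscription id is a placeholder):
import asyncio
from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.datafactory.aio import DataFactoryManagementClient

async def main():
    async with DefaultAzureCredential() as credential:
        async with DataFactoryManagementClient(credential, "<subscription-id>") as client:
            # Paged operations return AsyncItemPaged and are consumed with async for.
            async for factory in client.factories.list():
                print(factory.name)

asyncio.run(main())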
- :type request: ~data_factory_management_client.models.CreateDataFlowDebugSessionRequest + :type request: ~azure.mgmt.datafactory.models.CreateDataFlowDebugSessionRequest :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: True for ARMPolling, False for no polling, or a @@ -124,7 +124,7 @@ async def begin_create( :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of AsyncLROPoller that returns either CreateDataFlowDebugSessionResponse or the result of cls(response) - :rtype: ~azure.core.polling.AsyncLROPoller[~data_factory_management_client.models.CreateDataFlowDebugSessionResponse] + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.datafactory.models.CreateDataFlowDebugSessionResponse] :raises ~azure.core.exceptions.HttpResponseError: """ polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] @@ -187,7 +187,7 @@ def query_by_factory( :type factory_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either QueryDataFlowDebugSessionsResponse or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_factory_management_client.models.QueryDataFlowDebugSessionsResponse] + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datafactory.models.QueryDataFlowDebugSessionsResponse] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.QueryDataFlowDebugSessionsResponse"] @@ -261,10 +261,10 @@ async def add_data_flow( :param factory_name: The factory name. :type factory_name: str :param request: Data flow debug session definition with debug content. - :type request: ~data_factory_management_client.models.DataFlowDebugPackage + :type request: ~azure.mgmt.datafactory.models.DataFlowDebugPackage :keyword callable cls: A custom type or function that will be passed the direct response :return: AddDataFlowToDebugSessionResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.AddDataFlowToDebugSessionResponse + :rtype: ~azure.mgmt.datafactory.models.AddDataFlowToDebugSessionResponse :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.AddDataFlowToDebugSessionResponse"] @@ -327,7 +327,7 @@ async def delete( :param factory_name: The factory name. :type factory_name: str :param request: Data flow debug session definition for deletion. - :type request: ~data_factory_management_client.models.DeleteDataFlowDebugSessionRequest + :type request: ~azure.mgmt.datafactory.models.DeleteDataFlowDebugSessionRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: None, or the result of cls(response) :rtype: None @@ -449,7 +449,7 @@ async def begin_execute_command( :param factory_name: The factory name. :type factory_name: str :param request: Data flow debug command definition. 
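# The begin_* coroutines above return an AsyncLROPoller. A sketch of driving
# the debug-session LRO, assuming a client built as in the earlier example
# (time_to_live=60 is an illustrative value):
from azure.mgmt.datafactory.models import CreateDataFlowDebugSessionRequest

async def start_debug_session(client, resource_group, factory_name):
    poller = await client.data_flow_debug_session.begin_create(
        resource_group, factory_name,
        CreateDataFlowDebugSessionRequest(time_to_live=60))
    response = await poller.result()  # CreateDataFlowDebugSessionResponse
    return response.session_id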
- :type request: ~data_factory_management_client.models.DataFlowDebugCommandRequest + :type request: ~azure.mgmt.datafactory.models.DataFlowDebugCommandRequest :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: True for ARMPolling, False for no polling, or a @@ -457,7 +457,7 @@ async def begin_execute_command( :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of AsyncLROPoller that returns either DataFlowDebugCommandResponse or the result of cls(response) - :rtype: ~azure.core.polling.AsyncLROPoller[~data_factory_management_client.models.DataFlowDebugCommandResponse] + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.datafactory.models.DataFlowDebugCommandResponse] :raises ~azure.core.exceptions.HttpResponseError: """ polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_data_flows_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_data_flows_operations.py index 20d1ec288ce..203e381976b 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_data_flows_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_data_flows_operations.py @@ -26,7 +26,7 @@ class DataFlowsOperations: instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models + :type models: ~azure.mgmt.datafactory.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. @@ -59,13 +59,13 @@ async def create_or_update( :param data_flow_name: The data flow name. :type data_flow_name: str :param data_flow: Data flow resource definition. - :type data_flow: ~data_factory_management_client.models.DataFlowResource + :type data_flow: ~azure.mgmt.datafactory.models.DataFlowResource :param if_match: ETag of the data flow entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. 
:type if_match: str :keyword callable cls: A custom type or function that will be passed the direct response :return: DataFlowResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.DataFlowResource + :rtype: ~azure.mgmt.datafactory.models.DataFlowResource :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.DataFlowResource"] @@ -138,7 +138,7 @@ async def get( :type if_none_match: str :keyword callable cls: A custom type or function that will be passed the direct response :return: DataFlowResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.DataFlowResource + :rtype: ~azure.mgmt.datafactory.models.DataFlowResource :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.DataFlowResource"] @@ -258,7 +258,7 @@ def list_by_factory( :type factory_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either DataFlowListResponse or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_factory_management_client.models.DataFlowListResponse] + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datafactory.models.DataFlowListResponse] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.DataFlowListResponse"] diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_datasets_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_datasets_operations.py index 23cd39c246d..e0ced18dac1 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_datasets_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_datasets_operations.py @@ -26,7 +26,7 @@ class DatasetsOperations: instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models + :type models: ~azure.mgmt.datafactory.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. @@ -55,7 +55,7 @@ def list_by_factory( :type factory_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either DatasetListResponse or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_factory_management_client.models.DatasetListResponse] + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datafactory.models.DatasetListResponse] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.DatasetListResponse"] @@ -133,13 +133,13 @@ async def create_or_update( :param dataset_name: The dataset name. :type dataset_name: str :param dataset: Dataset resource definition. - :type dataset: ~data_factory_management_client.models.DatasetResource + :type dataset: ~azure.mgmt.datafactory.models.DatasetResource :param if_match: ETag of the dataset entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. 
:type if_match: str :keyword callable cls: A custom type or function that will be passed the direct response :return: DatasetResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.DatasetResource + :rtype: ~azure.mgmt.datafactory.models.DatasetResource :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.DatasetResource"] @@ -212,7 +212,7 @@ async def get( :type if_none_match: str :keyword callable cls: A custom type or function that will be passed the direct response :return: DatasetResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.DatasetResource or None + :rtype: ~azure.mgmt.datafactory.models.DatasetResource or None :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.DatasetResource"]] diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_exposure_control_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_exposure_control_operations.py index df180e52804..481eddc31af 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_exposure_control_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_exposure_control_operations.py @@ -25,7 +25,7 @@ class ExposureControlOperations: instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models + :type models: ~azure.mgmt.datafactory.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. @@ -51,10 +51,10 @@ async def get_feature_value( :param location_id: The location identifier. :type location_id: str :param exposure_control_request: The exposure control request. - :type exposure_control_request: ~data_factory_management_client.models.ExposureControlRequest + :type exposure_control_request: ~azure.mgmt.datafactory.models.ExposureControlRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: ExposureControlResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.ExposureControlResponse + :rtype: ~azure.mgmt.datafactory.models.ExposureControlResponse :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.ExposureControlResponse"] @@ -116,10 +116,10 @@ async def get_feature_value_by_factory( :param factory_name: The factory name. :type factory_name: str :param exposure_control_request: The exposure control request. - :type exposure_control_request: ~data_factory_management_client.models.ExposureControlRequest + :type exposure_control_request: ~azure.mgmt.datafactory.models.ExposureControlRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: ExposureControlResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.ExposureControlResponse + :rtype: ~azure.mgmt.datafactory.models.ExposureControlResponse :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.ExposureControlResponse"] @@ -182,10 +182,10 @@ async def query_feature_values_by_factory( :param factory_name: The factory name. 
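# The if_match / if_none_match parameters above are ETag concurrency checks,
# and get() deserializes nothing on a 304 - hence the "or None" return type.
# A sketch against the datasets group (names are placeholders):
async def refresh_if_changed(client, resource_group, factory_name, name, cached):
    fresh = await client.datasets.get(
        resource_group, factory_name, name, if_none_match=cached.etag)
    return cached if fresh is None else fresh  # None means the cached copy is current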
:type factory_name: str :param exposure_control_batch_request: The exposure control request for list of features. - :type exposure_control_batch_request: ~data_factory_management_client.models.ExposureControlBatchRequest + :type exposure_control_batch_request: ~azure.mgmt.datafactory.models.ExposureControlBatchRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: ExposureControlBatchResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.ExposureControlBatchResponse + :rtype: ~azure.mgmt.datafactory.models.ExposureControlBatchResponse :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.ExposureControlBatchResponse"] diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_factories_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_factories_operations.py index f8b64723a03..e1cd9769777 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_factories_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_factories_operations.py @@ -26,7 +26,7 @@ class FactoriesOperations: instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models + :type models: ~azure.mgmt.datafactory.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. @@ -49,7 +49,7 @@ def list( :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either FactoryListResponse or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_factory_management_client.models.FactoryListResponse] + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datafactory.models.FactoryListResponse] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.FactoryListResponse"] @@ -118,10 +118,10 @@ async def configure_factory_repo( :param location_id: The location identifier. :type location_id: str :param factory_repo_update: Update factory repo request definition. 
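# A sketch of the exposure-control group documented above; the feature name
# and feature type below are placeholders, not documented flags:
from azure.mgmt.datafactory.models import ExposureControlRequest

async def feature_value(client, resource_group, factory_name, feature_name):
    response = await client.exposure_control.get_feature_value_by_factory(
        resource_group, factory_name,
        ExposureControlRequest(feature_name=feature_name, feature_type="Feature"))
    return response.value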
- :type factory_repo_update: ~data_factory_management_client.models.FactoryRepoUpdate + :type factory_repo_update: ~azure.mgmt.datafactory.models.FactoryRepoUpdate :keyword callable cls: A custom type or function that will be passed the direct response :return: Factory, or the result of cls(response) - :rtype: ~data_factory_management_client.models.Factory + :rtype: ~azure.mgmt.datafactory.models.Factory :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.Factory"] @@ -180,7 +180,7 @@ def list_by_resource_group( :type resource_group_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either FactoryListResponse or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_factory_management_client.models.FactoryListResponse] + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datafactory.models.FactoryListResponse] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.FactoryListResponse"] @@ -254,13 +254,13 @@ async def create_or_update( :param factory_name: The factory name. :type factory_name: str :param factory: Factory resource definition. - :type factory: ~data_factory_management_client.models.Factory + :type factory: ~azure.mgmt.datafactory.models.Factory :param if_match: ETag of the factory entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. :type if_match: str :keyword callable cls: A custom type or function that will be passed the direct response :return: Factory, or the result of cls(response) - :rtype: ~data_factory_management_client.models.Factory + :rtype: ~azure.mgmt.datafactory.models.Factory :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.Factory"] @@ -325,10 +325,10 @@ async def update( :param factory_name: The factory name. :type factory_name: str :param factory_update_parameters: The parameters for updating a factory. - :type factory_update_parameters: ~data_factory_management_client.models.FactoryUpdateParameters + :type factory_update_parameters: ~azure.mgmt.datafactory.models.FactoryUpdateParameters :keyword callable cls: A custom type or function that will be passed the direct response :return: Factory, or the result of cls(response) - :rtype: ~data_factory_management_client.models.Factory + :rtype: ~azure.mgmt.datafactory.models.Factory :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.Factory"] @@ -395,7 +395,7 @@ async def get( :type if_none_match: str :keyword callable cls: A custom type or function that will be passed the direct response :return: Factory, or the result of cls(response) - :rtype: ~data_factory_management_client.models.Factory or None + :rtype: ~azure.mgmt.datafactory.models.Factory or None :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.Factory"]] @@ -512,10 +512,10 @@ async def get_git_hub_access_token( :param factory_name: The factory name. :type factory_name: str :param git_hub_access_token_request: Get GitHub access token request definition. 
- :type git_hub_access_token_request: ~data_factory_management_client.models.GitHubAccessTokenRequest + :type git_hub_access_token_request: ~azure.mgmt.datafactory.models.GitHubAccessTokenRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: GitHubAccessTokenResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.GitHubAccessTokenResponse + :rtype: ~azure.mgmt.datafactory.models.GitHubAccessTokenResponse :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.GitHubAccessTokenResponse"] @@ -578,10 +578,10 @@ async def get_data_plane_access( :param factory_name: The factory name. :type factory_name: str :param policy: Data Plane user access policy definition. - :type policy: ~data_factory_management_client.models.UserAccessPolicy + :type policy: ~azure.mgmt.datafactory.models.UserAccessPolicy :keyword callable cls: A custom type or function that will be passed the direct response :return: AccessPolicyResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.AccessPolicyResponse + :rtype: ~azure.mgmt.datafactory.models.AccessPolicyResponse :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.AccessPolicyResponse"] diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_integration_runtime_nodes_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_integration_runtime_nodes_operations.py index 098d00bbb3e..2e475b49a33 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_integration_runtime_nodes_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_integration_runtime_nodes_operations.py @@ -25,7 +25,7 @@ class IntegrationRuntimeNodesOperations: instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models + :type models: ~azure.mgmt.datafactory.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. @@ -60,7 +60,7 @@ async def get( :type node_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: SelfHostedIntegrationRuntimeNode, or the result of cls(response) - :rtype: ~data_factory_management_client.models.SelfHostedIntegrationRuntimeNode + :rtype: ~azure.mgmt.datafactory.models.SelfHostedIntegrationRuntimeNode :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.SelfHostedIntegrationRuntimeNode"] @@ -190,10 +190,10 @@ async def update( :type node_name: str :param update_integration_runtime_node_request: The parameters for updating an integration runtime node. 
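# A sketch of tuning a self-hosted runtime node via the request model named
# above (the concurrent-jobs limit is illustrative):
from azure.mgmt.datafactory.models import UpdateIntegrationRuntimeNodeRequest

async def cap_node_jobs(client, resource_group, factory_name, ir_name, node_name):
    return await client.integration_runtime_nodes.update(
        resource_group, factory_name, ir_name, node_name,
        UpdateIntegrationRuntimeNodeRequest(concurrent_jobs_limit=4))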
- :type update_integration_runtime_node_request: ~data_factory_management_client.models.UpdateIntegrationRuntimeNodeRequest + :type update_integration_runtime_node_request: ~azure.mgmt.datafactory.models.UpdateIntegrationRuntimeNodeRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: SelfHostedIntegrationRuntimeNode, or the result of cls(response) - :rtype: ~data_factory_management_client.models.SelfHostedIntegrationRuntimeNode + :rtype: ~azure.mgmt.datafactory.models.SelfHostedIntegrationRuntimeNode :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.SelfHostedIntegrationRuntimeNode"] @@ -264,7 +264,7 @@ async def get_ip_address( :type node_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: IntegrationRuntimeNodeIpAddress, or the result of cls(response) - :rtype: ~data_factory_management_client.models.IntegrationRuntimeNodeIpAddress + :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeNodeIpAddress :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeNodeIpAddress"] diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_integration_runtime_object_metadata_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_integration_runtime_object_metadata_operations.py index a1825a0d1bb..2b15981c5e1 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_integration_runtime_object_metadata_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_integration_runtime_object_metadata_operations.py @@ -27,7 +27,7 @@ class IntegrationRuntimeObjectMetadataOperations: instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models + :type models: ~azure.mgmt.datafactory.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. @@ -115,7 +115,7 @@ async def begin_refresh( :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of AsyncLROPoller that returns either SsisObjectMetadataStatusResponse or the result of cls(response) - :rtype: ~azure.core.polling.AsyncLROPoller[~data_factory_management_client.models.SsisObjectMetadataStatusResponse] + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.datafactory.models.SsisObjectMetadataStatusResponse] :raises ~azure.core.exceptions.HttpResponseError: """ polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] @@ -183,10 +183,10 @@ async def get( :param integration_runtime_name: The integration runtime name. :type integration_runtime_name: str :param get_metadata_request: The parameters for getting a SSIS object metadata. 
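# SSIS object metadata must be refreshed (an LRO) before get() returns useful
# results; a sketch assuming the models named above (metadata_path=None is
# assumed to list top-level folders):
from azure.mgmt.datafactory.models import GetSsisObjectMetadataRequest

async def ssis_metadata(client, resource_group, factory_name, ir_name):
    poller = await client.integration_runtime_object_metadata.begin_refresh(
        resource_group, factory_name, ir_name)
    await poller.result()
    return await client.integration_runtime_object_metadata.get(
        resource_group, factory_name, ir_name,
        get_metadata_request=GetSsisObjectMetadataRequest(metadata_path=None))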
- :type get_metadata_request: ~data_factory_management_client.models.GetSsisObjectMetadataRequest + :type get_metadata_request: ~azure.mgmt.datafactory.models.GetSsisObjectMetadataRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: SsisObjectMetadataListResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.SsisObjectMetadataListResponse + :rtype: ~azure.mgmt.datafactory.models.SsisObjectMetadataListResponse :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.SsisObjectMetadataListResponse"] diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_integration_runtimes_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_integration_runtimes_operations.py index 6b27efc1819..8a957a86373 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_integration_runtimes_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_integration_runtimes_operations.py @@ -28,7 +28,7 @@ class IntegrationRuntimesOperations: instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models + :type models: ~azure.mgmt.datafactory.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. @@ -57,7 +57,7 @@ def list_by_factory( :type factory_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either IntegrationRuntimeListResponse or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_factory_management_client.models.IntegrationRuntimeListResponse] + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datafactory.models.IntegrationRuntimeListResponse] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeListResponse"] @@ -135,13 +135,13 @@ async def create_or_update( :param integration_runtime_name: The integration runtime name. :type integration_runtime_name: str :param integration_runtime: Integration runtime resource definition. - :type integration_runtime: ~data_factory_management_client.models.IntegrationRuntimeResource + :type integration_runtime: ~azure.mgmt.datafactory.models.IntegrationRuntimeResource :param if_match: ETag of the integration runtime entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. 
:type if_match: str :keyword callable cls: A custom type or function that will be passed the direct response :return: IntegrationRuntimeResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.IntegrationRuntimeResource + :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeResource :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeResource"] @@ -215,7 +215,7 @@ async def get( :type if_none_match: str :keyword callable cls: A custom type or function that will be passed the direct response :return: IntegrationRuntimeResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.IntegrationRuntimeResource or None + :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeResource or None :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.IntegrationRuntimeResource"]] @@ -281,10 +281,10 @@ async def update( :param integration_runtime_name: The integration runtime name. :type integration_runtime_name: str :param update_integration_runtime_request: The parameters for updating an integration runtime. - :type update_integration_runtime_request: ~data_factory_management_client.models.UpdateIntegrationRuntimeRequest + :type update_integration_runtime_request: ~azure.mgmt.datafactory.models.UpdateIntegrationRuntimeRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: IntegrationRuntimeResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.IntegrationRuntimeResource + :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeResource :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeResource"] @@ -410,7 +410,7 @@ async def get_status( :type integration_runtime_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: IntegrationRuntimeStatusResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.IntegrationRuntimeStatusResponse + :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeStatusResponse :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeStatusResponse"] @@ -455,6 +455,68 @@ async def get_status( return deserialized get_status.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/getStatus'} # type: ignore + async def list_outbound_network_dependencies_endpoints( + self, + resource_group_name: str, + factory_name: str, + integration_runtime_name: str, + **kwargs + ) -> "models.IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse": + """Gets the list of outbound network dependencies for a given Azure-SSIS integration runtime. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. 
+ :type integration_runtime_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse, or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self.list_outbound_network_dependencies_endpoints.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + list_outbound_network_dependencies_endpoints.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/outboundNetworkDependenciesEndpoints'} # type: ignore + async def get_connection_info( self, resource_group_name: str, @@ -473,7 +535,7 @@ async def get_connection_info( :type integration_runtime_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: IntegrationRuntimeConnectionInfo, or the result of cls(response) - :rtype: ~data_factory_management_client.models.IntegrationRuntimeConnectionInfo + :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeConnectionInfo :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeConnectionInfo"] @@ -536,10 +598,10 @@ async def regenerate_auth_key( :type integration_runtime_name: str :param regenerate_key_parameters: The parameters for regenerating integration runtime 
authentication key. - :type regenerate_key_parameters: ~data_factory_management_client.models.IntegrationRuntimeRegenerateKeyParameters + :type regenerate_key_parameters: ~azure.mgmt.datafactory.models.IntegrationRuntimeRegenerateKeyParameters :keyword callable cls: A custom type or function that will be passed the direct response :return: IntegrationRuntimeAuthKeys, or the result of cls(response) - :rtype: ~data_factory_management_client.models.IntegrationRuntimeAuthKeys + :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeAuthKeys :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeAuthKeys"] @@ -606,7 +668,7 @@ async def list_auth_keys( :type integration_runtime_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: IntegrationRuntimeAuthKeys, or the result of cls(response) - :rtype: ~data_factory_management_client.models.IntegrationRuntimeAuthKeys + :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeAuthKeys :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeAuthKeys"] @@ -724,7 +786,7 @@ async def begin_start( :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of AsyncLROPoller that returns either IntegrationRuntimeStatusResponse or the result of cls(response) - :rtype: ~azure.core.polling.AsyncLROPoller[~data_factory_management_client.models.IntegrationRuntimeStatusResponse] + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.datafactory.models.IntegrationRuntimeStatusResponse] :raises ~azure.core.exceptions.HttpResponseError: """ polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] @@ -969,7 +1031,7 @@ async def get_monitoring_data( :type integration_runtime_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: IntegrationRuntimeMonitoringData, or the result of cls(response) - :rtype: ~data_factory_management_client.models.IntegrationRuntimeMonitoringData + :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeMonitoringData :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeMonitoringData"] @@ -1092,7 +1154,7 @@ async def remove_links( :type integration_runtime_name: str :param linked_integration_runtime_request: The data factory name for the linked integration runtime. - :type linked_integration_runtime_request: ~data_factory_management_client.models.LinkedIntegrationRuntimeRequest + :type linked_integration_runtime_request: ~azure.mgmt.datafactory.models.LinkedIntegrationRuntimeRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: None, or the result of cls(response) :rtype: None @@ -1159,10 +1221,10 @@ async def create_linked_integration_runtime( :param integration_runtime_name: The integration runtime name. :type integration_runtime_name: str :param create_linked_integration_runtime_request: The linked integration runtime properties. 
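# A sketch of calling the newly added outbound-dependencies operation above;
# iterating .value into per-category endpoint lists is an assumption based on
# the response model name:
async def outbound_endpoints(client, resource_group, factory_name, ir_name):
    response = await client.integration_runtimes.list_outbound_network_dependencies_endpoints(
        resource_group, factory_name, ir_name)
    for category in response.value or []:
        print(category.category, len(category.endpoints or []))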
- :type create_linked_integration_runtime_request: ~data_factory_management_client.models.CreateLinkedIntegrationRuntimeRequest + :type create_linked_integration_runtime_request: ~azure.mgmt.datafactory.models.CreateLinkedIntegrationRuntimeRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: IntegrationRuntimeStatusResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.IntegrationRuntimeStatusResponse + :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeStatusResponse :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeStatusResponse"] diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_linked_services_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_linked_services_operations.py index e6444acf5f7..a76caa476f3 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_linked_services_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_linked_services_operations.py @@ -26,7 +26,7 @@ class LinkedServicesOperations: instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models + :type models: ~azure.mgmt.datafactory.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. @@ -55,7 +55,7 @@ def list_by_factory( :type factory_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either LinkedServiceListResponse or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_factory_management_client.models.LinkedServiceListResponse] + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datafactory.models.LinkedServiceListResponse] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.LinkedServiceListResponse"] @@ -133,13 +133,13 @@ async def create_or_update( :param linked_service_name: The linked service name. :type linked_service_name: str :param linked_service: Linked service resource definition. - :type linked_service: ~data_factory_management_client.models.LinkedServiceResource + :type linked_service: ~azure.mgmt.datafactory.models.LinkedServiceResource :param if_match: ETag of the linkedService entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. 
:type if_match: str :keyword callable cls: A custom type or function that will be passed the direct response :return: LinkedServiceResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.LinkedServiceResource + :rtype: ~azure.mgmt.datafactory.models.LinkedServiceResource :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.LinkedServiceResource"] @@ -213,7 +213,7 @@ async def get( :type if_none_match: str :keyword callable cls: A custom type or function that will be passed the direct response :return: LinkedServiceResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.LinkedServiceResource or None + :rtype: ~azure.mgmt.datafactory.models.LinkedServiceResource or None :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.LinkedServiceResource"]] diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_managed_private_endpoints_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_managed_private_endpoints_operations.py index 3a0dfd46129..5d8793b0b56 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_managed_private_endpoints_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_managed_private_endpoints_operations.py @@ -26,7 +26,7 @@ class ManagedPrivateEndpointsOperations: instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models + :type models: ~azure.mgmt.datafactory.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. @@ -58,7 +58,7 @@ def list_by_factory( :type managed_virtual_network_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either ManagedPrivateEndpointListResponse or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_factory_management_client.models.ManagedPrivateEndpointListResponse] + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datafactory.models.ManagedPrivateEndpointListResponse] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedPrivateEndpointListResponse"] @@ -140,13 +140,13 @@ async def create_or_update( :param managed_private_endpoint_name: Managed private endpoint name. :type managed_private_endpoint_name: str :param managed_private_endpoint: Managed private endpoint resource definition. - :type managed_private_endpoint: ~data_factory_management_client.models.ManagedPrivateEndpointResource + :type managed_private_endpoint: ~azure.mgmt.datafactory.models.ManagedPrivateEndpointResource :param if_match: ETag of the managed private endpoint entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. 
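As a usage sketch for the managed-private-endpoint group: every call is scoped to a managed virtual network, so its name rides along. The model field names (private_link_resource_id, group_id) and the "default" network name here are assumptions, not taken from this diff:

from azure.mgmt.datafactory import models

async def create_and_list_mpe(client):
    mpe = models.ManagedPrivateEndpointResource(
        properties=models.ManagedPrivateEndpoint(
            private_link_resource_id="<target-resource-id>",
            group_id="blob"))
    await client.managed_private_endpoints.create_or_update(
        "myResourceGroup", "myFactory", "default", "myEndpoint", mpe)
    # list_by_factory pages asynchronously.
    async for ep in client.managed_private_endpoints.list_by_factory(
            "myResourceGroup", "myFactory", "default"):
        print(ep.name)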
:type if_match: str :keyword callable cls: A custom type or function that will be passed the direct response :return: ManagedPrivateEndpointResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.ManagedPrivateEndpointResource + :rtype: ~azure.mgmt.datafactory.models.ManagedPrivateEndpointResource :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedPrivateEndpointResource"] @@ -224,7 +224,7 @@ async def get( :type if_none_match: str :keyword callable cls: A custom type or function that will be passed the direct response :return: ManagedPrivateEndpointResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.ManagedPrivateEndpointResource + :rtype: ~azure.mgmt.datafactory.models.ManagedPrivateEndpointResource :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedPrivateEndpointResource"] diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_managed_virtual_networks_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_managed_virtual_networks_operations.py index 908d7b58ffe..56da8f2504e 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_managed_virtual_networks_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_managed_virtual_networks_operations.py @@ -26,7 +26,7 @@ class ManagedVirtualNetworksOperations: instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models + :type models: ~azure.mgmt.datafactory.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. @@ -55,7 +55,7 @@ def list_by_factory( :type factory_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either ManagedVirtualNetworkListResponse or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_factory_management_client.models.ManagedVirtualNetworkListResponse] + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datafactory.models.ManagedVirtualNetworkListResponse] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedVirtualNetworkListResponse"] @@ -133,13 +133,13 @@ async def create_or_update( :param managed_virtual_network_name: Managed virtual network name. :type managed_virtual_network_name: str :param managed_virtual_network: Managed Virtual Network resource definition. - :type managed_virtual_network: ~data_factory_management_client.models.ManagedVirtualNetworkResource + :type managed_virtual_network: ~azure.mgmt.datafactory.models.ManagedVirtualNetworkResource :param if_match: ETag of the managed Virtual Network entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. 
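The managed-virtual-network group follows the same shape; ManagedVirtualNetwork appears to expose only read-only properties in this API version, so create_or_update is effectively "ensure it exists". A sketch under the same assumptions:

from azure.mgmt.datafactory import models

async def ensure_managed_vnet(client):
    mvn = models.ManagedVirtualNetworkResource(
        properties=models.ManagedVirtualNetwork())
    created = await client.managed_virtual_networks.create_or_update(
        "myResourceGroup", "myFactory", "default", mvn)
    print(created.etag)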
:type if_match: str :keyword callable cls: A custom type or function that will be passed the direct response :return: ManagedVirtualNetworkResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.ManagedVirtualNetworkResource + :rtype: ~azure.mgmt.datafactory.models.ManagedVirtualNetworkResource :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedVirtualNetworkResource"] @@ -213,7 +213,7 @@ async def get( :type if_none_match: str :keyword callable cls: A custom type or function that will be passed the direct response :return: ManagedVirtualNetworkResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.ManagedVirtualNetworkResource + :rtype: ~azure.mgmt.datafactory.models.ManagedVirtualNetworkResource :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedVirtualNetworkResource"] diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_operations.py index 8d96ffc136c..9bad59587eb 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_operations.py @@ -26,7 +26,7 @@ class Operations: instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models + :type models: ~azure.mgmt.datafactory.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. @@ -49,7 +49,7 @@ def list( :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either OperationListResponse or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_factory_management_client.models.OperationListResponse] + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datafactory.models.OperationListResponse] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.OperationListResponse"] diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_pipeline_runs_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_pipeline_runs_operations.py index 8d4b4efdb99..abfccb9ee57 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_pipeline_runs_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_pipeline_runs_operations.py @@ -25,7 +25,7 @@ class PipelineRunsOperations: instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models + :type models: ~azure.mgmt.datafactory.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. @@ -54,10 +54,10 @@ async def query_by_factory( :param factory_name: The factory name. :type factory_name: str :param filter_parameters: Parameters to filter the pipeline run. 
- :type filter_parameters: ~data_factory_management_client.models.RunFilterParameters + :type filter_parameters: ~azure.mgmt.datafactory.models.RunFilterParameters :keyword callable cls: A custom type or function that will be passed the direct response :return: PipelineRunsQueryResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.PipelineRunsQueryResponse + :rtype: ~azure.mgmt.datafactory.models.PipelineRunsQueryResponse :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineRunsQueryResponse"] @@ -123,7 +123,7 @@ async def get( :type run_id: str :keyword callable cls: A custom type or function that will be passed the direct response :return: PipelineRun, or the result of cls(response) - :rtype: ~data_factory_management_client.models.PipelineRun + :rtype: ~azure.mgmt.datafactory.models.PipelineRun :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineRun"] diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_pipelines_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_pipelines_operations.py index 1c73e154e35..12c5792383c 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_pipelines_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_pipelines_operations.py @@ -26,7 +26,7 @@ class PipelinesOperations: instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models + :type models: ~azure.mgmt.datafactory.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. @@ -55,7 +55,7 @@ def list_by_factory( :type factory_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either PipelineListResponse or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_factory_management_client.models.PipelineListResponse] + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datafactory.models.PipelineListResponse] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineListResponse"] @@ -133,13 +133,13 @@ async def create_or_update( :param pipeline_name: The pipeline name. :type pipeline_name: str :param pipeline: Pipeline resource definition. - :type pipeline: ~data_factory_management_client.models.PipelineResource + :type pipeline: ~azure.mgmt.datafactory.models.PipelineResource :param if_match: ETag of the pipeline entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. 
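RunFilterParameters is the model both run-query endpoints share; its constructor keywords (continuation_token, last_updated_after, last_updated_before, filters, order_by) are visible in the removed flattened code further down this diff. A query sketch; the RunQueryFilter/RunQueryOrderBy field values are assumptions from the service model:

import datetime
from azure.mgmt.datafactory import models

async def recent_failed_pipeline_runs(client):
    now = datetime.datetime.utcnow()
    filter_parameters = models.RunFilterParameters(
        last_updated_after=now - datetime.timedelta(days=1),
        last_updated_before=now,
        filters=[models.RunQueryFilter(
            operand="Status", operator="Equals", values=["Failed"])],
        order_by=[models.RunQueryOrderBy(order_by="RunStart", order="DESC")])
    resp = await client.pipeline_runs.query_by_factory(
        "myResourceGroup", "myFactory", filter_parameters)
    for run in resp.value:
        print(run.run_id, run.status)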
:type if_match: str :keyword callable cls: A custom type or function that will be passed the direct response :return: PipelineResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.PipelineResource + :rtype: ~azure.mgmt.datafactory.models.PipelineResource :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineResource"] @@ -212,7 +212,7 @@ async def get( :type if_none_match: str :keyword callable cls: A custom type or function that will be passed the direct response :return: PipelineResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.PipelineResource or None + :rtype: ~azure.mgmt.datafactory.models.PipelineResource or None :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.PipelineResource"]] @@ -357,7 +357,7 @@ async def create_run( :type parameters: dict[str, object] :keyword callable cls: A custom type or function that will be passed the direct response :return: CreateRunResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.CreateRunResponse + :rtype: ~azure.mgmt.datafactory.models.CreateRunResponse :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.CreateRunResponse"] diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_private_end_point_connections_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_private_end_point_connections_operations.py index 4dabd9932f8..c6e085a66e9 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_private_end_point_connections_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_private_end_point_connections_operations.py @@ -26,7 +26,7 @@ class PrivateEndPointConnectionsOperations: instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models + :type models: ~azure.mgmt.datafactory.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. 
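create_run above takes the run parameters as a plain dict[str, object] and resolves to CreateRunResponse, whose run identifier feeds straight into pipeline_runs.get. A small end-to-end sketch (placeholder names; the `run_id` field name is assumed from the response model):

async def run_pipeline_and_check(client):
    run = await client.pipelines.create_run(
        "myResourceGroup", "myFactory", "myPipeline",
        parameters={"outputBlobName": "out.csv"})
    details = await client.pipeline_runs.get(
        "myResourceGroup", "myFactory", run.run_id)
    print(details.status)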
@@ -55,7 +55,7 @@ def list_by_factory( :type factory_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either PrivateEndpointConnectionListResponse or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_factory_management_client.models.PrivateEndpointConnectionListResponse] + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datafactory.models.PrivateEndpointConnectionListResponse] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.PrivateEndpointConnectionListResponse"] diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_private_endpoint_connection_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_private_endpoint_connection_operations.py index 90ee37632ce..a9bdb71c6ca 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_private_endpoint_connection_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_private_endpoint_connection_operations.py @@ -25,7 +25,7 @@ class PrivateEndpointConnectionOperations: instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models + :type models: ~azure.mgmt.datafactory.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. @@ -58,13 +58,13 @@ async def create_or_update( :param private_endpoint_connection_name: The private endpoint connection name. :type private_endpoint_connection_name: str :param private_endpoint_wrapper: - :type private_endpoint_wrapper: ~data_factory_management_client.models.PrivateLinkConnectionApprovalRequestResource + :type private_endpoint_wrapper: ~azure.mgmt.datafactory.models.PrivateLinkConnectionApprovalRequestResource :param if_match: ETag of the private endpoint connection entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. 
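The approval wrapper above is the one write model this connection group exposes. The nested model names in this sketch (PrivateLinkConnectionApprovalRequest, PrivateLinkConnectionState) are assumptions inferred from the wrapper's name, so treat it as a shape illustration only:

from azure.mgmt.datafactory import models

async def approve_connection(client):
    approval = models.PrivateLinkConnectionApprovalRequestResource(
        properties=models.PrivateLinkConnectionApprovalRequest(
            private_link_service_connection_state=models.PrivateLinkConnectionState(
                status="Approved", description="Approved by admin")))
    # Assumed attribute name, matching PrivateEndpointConnectionOperations.
    await client.private_endpoint_connection.create_or_update(
        "myResourceGroup", "myFactory", "myConnection", approval)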
:type if_match: str :keyword callable cls: A custom type or function that will be passed the direct response :return: PrivateEndpointConnectionResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.PrivateEndpointConnectionResource + :rtype: ~azure.mgmt.datafactory.models.PrivateEndpointConnectionResource :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.PrivateEndpointConnectionResource"] @@ -138,7 +138,7 @@ async def get( :type if_none_match: str :keyword callable cls: A custom type or function that will be passed the direct response :return: PrivateEndpointConnectionResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.PrivateEndpointConnectionResource + :rtype: ~azure.mgmt.datafactory.models.PrivateEndpointConnectionResource :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.PrivateEndpointConnectionResource"] diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_private_link_resources_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_private_link_resources_operations.py index fd47a6c7373..bd2a99f5def 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_private_link_resources_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_private_link_resources_operations.py @@ -25,7 +25,7 @@ class PrivateLinkResourcesOperations: instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models + :type models: ~azure.mgmt.datafactory.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. @@ -54,7 +54,7 @@ async def get( :type factory_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: PrivateLinkResourcesWrapper, or the result of cls(response) - :rtype: ~data_factory_management_client.models.PrivateLinkResourcesWrapper + :rtype: ~azure.mgmt.datafactory.models.PrivateLinkResourcesWrapper :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.PrivateLinkResourcesWrapper"] diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_trigger_runs_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_trigger_runs_operations.py index 7fbcbc61f39..3a90a28b3b0 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_trigger_runs_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_trigger_runs_operations.py @@ -25,7 +25,7 @@ class TriggerRunsOperations: instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models + :type models: ~azure.mgmt.datafactory.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. @@ -180,10 +180,10 @@ async def query_by_factory( :param factory_name: The factory name. :type factory_name: str :param filter_parameters: Parameters to filter the pipeline run. 
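private_link_resources.get is the odd one out: no paging, just a wrapper whose value holds the entries. A one-call sketch (attribute names assumed):

async def show_private_link_resources(client):
    wrapper = await client.private_link_resources.get(
        "myResourceGroup", "myFactory")
    for res in wrapper.value:
        print(res.name)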
- :type filter_parameters: ~data_factory_management_client.models.RunFilterParameters + :type filter_parameters: ~azure.mgmt.datafactory.models.RunFilterParameters :keyword callable cls: A custom type or function that will be passed the direct response :return: TriggerRunsQueryResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.TriggerRunsQueryResponse + :rtype: ~azure.mgmt.datafactory.models.TriggerRunsQueryResponse :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerRunsQueryResponse"] diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_triggers_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_triggers_operations.py index a9f7bd54c4d..1a7a49887e2 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_triggers_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_triggers_operations.py @@ -28,7 +28,7 @@ class TriggersOperations: instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models + :type models: ~azure.mgmt.datafactory.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. @@ -57,7 +57,7 @@ def list_by_factory( :type factory_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either TriggerListResponse or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_factory_management_client.models.TriggerListResponse] + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datafactory.models.TriggerListResponse] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerListResponse"] @@ -131,10 +131,10 @@ async def query_by_factory( :param factory_name: The factory name. :type factory_name: str :param filter_parameters: Parameters to filter the triggers. - :type filter_parameters: ~data_factory_management_client.models.TriggerFilterParameters + :type filter_parameters: ~azure.mgmt.datafactory.models.TriggerFilterParameters :keyword callable cls: A custom type or function that will be passed the direct response :return: TriggerQueryResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.TriggerQueryResponse + :rtype: ~azure.mgmt.datafactory.models.TriggerQueryResponse :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerQueryResponse"] @@ -201,13 +201,13 @@ async def create_or_update( :param trigger_name: The trigger name. :type trigger_name: str :param trigger: Trigger resource definition. - :type trigger: ~data_factory_management_client.models.TriggerResource + :type trigger: ~azure.mgmt.datafactory.models.TriggerResource :param if_match: ETag of the trigger entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. 
:type if_match: str :keyword callable cls: A custom type or function that will be passed the direct response :return: TriggerResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.TriggerResource + :rtype: ~azure.mgmt.datafactory.models.TriggerResource :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerResource"] @@ -280,7 +280,7 @@ async def get( :type if_none_match: str :keyword callable cls: A custom type or function that will be passed the direct response :return: TriggerResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.TriggerResource or None + :rtype: ~azure.mgmt.datafactory.models.TriggerResource or None :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.TriggerResource"]] @@ -461,7 +461,7 @@ async def begin_subscribe_to_events( :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of AsyncLROPoller that returns either TriggerSubscriptionOperationStatus or the result of cls(response) - :rtype: ~azure.core.polling.AsyncLROPoller[~data_factory_management_client.models.TriggerSubscriptionOperationStatus] + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.datafactory.models.TriggerSubscriptionOperationStatus] :raises ~azure.core.exceptions.HttpResponseError: """ polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] @@ -528,7 +528,7 @@ async def get_event_subscription_status( :type trigger_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: TriggerSubscriptionOperationStatus, or the result of cls(response) - :rtype: ~data_factory_management_client.models.TriggerSubscriptionOperationStatus + :rtype: ~azure.mgmt.datafactory.models.TriggerSubscriptionOperationStatus :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerSubscriptionOperationStatus"] @@ -646,7 +646,7 @@ async def begin_unsubscribe_from_events( :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of AsyncLROPoller that returns either TriggerSubscriptionOperationStatus or the result of cls(response) - :rtype: ~azure.core.polling.AsyncLROPoller[~data_factory_management_client.models.TriggerSubscriptionOperationStatus] + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.datafactory.models.TriggerSubscriptionOperationStatus] :raises ~azure.core.exceptions.HttpResponseError: """ polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/__init__.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/__init__.py deleted file mode 100644 index 554e3ba9232..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/__init__.py +++ /dev/null @@ -1,45 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
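Both event-subscription pollers above resolve to TriggerSubscriptionOperationStatus, and get_event_subscription_status gives the same snapshot without an LRO. A sketch (the `status` field name is an assumption):

async def subscribe_event_trigger(client):
    poller = await client.triggers.begin_subscribe_to_events(
        "myResourceGroup", "myFactory", "myEventTrigger")
    final = await poller.result()
    print(final.status)
    # Later: a cheap point-in-time check, no poller involved.
    snap = await client.triggers.get_event_subscription_status(
        "myResourceGroup", "myFactory", "myEventTrigger")
    print(snap.status)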
See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- - -from ._operation_operations_async import OperationOperations -from ._factory_operations_async import FactoryOperations -from ._exposure_control_operations_async import ExposureControlOperations -from ._integration_runtime_operations_async import IntegrationRuntimeOperations -from ._integration_runtime_object_metadata_operations_async import IntegrationRuntimeObjectMetadataOperations -from ._integration_runtime_node_operations_async import IntegrationRuntimeNodeOperations -from ._linked_service_operations_async import LinkedServiceOperations -from ._dataset_operations_async import DatasetOperations -from ._pipeline_operations_async import PipelineOperations -from ._pipeline_run_operations_async import PipelineRunOperations -from ._activity_run_operations_async import ActivityRunOperations -from ._trigger_operations_async import TriggerOperations -from ._trigger_run_operations_async import TriggerRunOperations -from ._data_flow_operations_async import DataFlowOperations -from ._data_flow_debug_session_operations_async import DataFlowDebugSessionOperations -from ._managed_virtual_network_operations_async import ManagedVirtualNetworkOperations -from ._managed_private_endpoint_operations_async import ManagedPrivateEndpointOperations - -__all__ = [ - 'OperationOperations', - 'FactoryOperations', - 'ExposureControlOperations', - 'IntegrationRuntimeOperations', - 'IntegrationRuntimeObjectMetadataOperations', - 'IntegrationRuntimeNodeOperations', - 'LinkedServiceOperations', - 'DatasetOperations', - 'PipelineOperations', - 'PipelineRunOperations', - 'ActivityRunOperations', - 'TriggerOperations', - 'TriggerRunOperations', - 'DataFlowOperations', - 'DataFlowDebugSessionOperations', - 'ManagedVirtualNetworkOperations', - 'ManagedPrivateEndpointOperations', -] diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_activity_run_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_activity_run_operations_async.py deleted file mode 100644 index 0d2e56be08b..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_activity_run_operations_async.py +++ /dev/null @@ -1,127 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -import datetime -from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar -import warnings - -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest -from azure.mgmt.core.exceptions import ARMErrorFormat - -from ... 
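The deletions starting here remove the whole operations_async package; the class names it exported map one-to-one onto the pluralized classes in the aio/operations modules patched above. A hedged import-migration sketch (the new package's re-exports are assumed to mirror the module names in this diff):

# Before this change (package removed below):
#   from azext_datafactory.vendored_sdks.datafactory.aio.operations_async import (
#       LinkedServiceOperations, PipelineOperations, TriggerOperations)
# After this change:
from azext_datafactory.vendored_sdks.datafactory.aio.operations import (
    LinkedServicesOperations, PipelinesOperations, TriggersOperations)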
import models - -T = TypeVar('T') -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] - -class ActivityRunOperations: - """ActivityRunOperations async operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = models - - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - async def query_by_pipeline_run( - self, - resource_group_name: str, - factory_name: str, - run_id: str, - last_updated_after: datetime.datetime, - last_updated_before: datetime.datetime, - continuation_token_parameter: Optional[str] = None, - filters: Optional[List["models.RunQueryFilter"]] = None, - order_by: Optional[List["models.RunQueryOrderBy"]] = None, - **kwargs - ) -> "models.ActivityRunsQueryResponse": - """Query activity runs based on input filter conditions. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param run_id: The pipeline run identifier. - :type run_id: str - :param last_updated_after: The time at or after which the run event was updated in 'ISO 8601' - format. - :type last_updated_after: ~datetime.datetime - :param last_updated_before: The time at or before which the run event was updated in 'ISO 8601' - format. - :type last_updated_before: ~datetime.datetime - :param continuation_token_parameter: The continuation token for getting the next page of - results. Null for first page. - :type continuation_token_parameter: str - :param filters: List of filters. - :type filters: list[~data_factory_management_client.models.RunQueryFilter] - :param order_by: List of OrderBy option. 
- :type order_by: list[~data_factory_management_client.models.RunQueryOrderBy] - :keyword callable cls: A custom type or function that will be passed the direct response - :return: ActivityRunsQueryResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.ActivityRunsQueryResponse - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.ActivityRunsQueryResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - filter_parameters = models.RunFilterParameters(continuation_token=continuation_token_parameter, last_updated_after=last_updated_after, last_updated_before=last_updated_before, filters=filters, order_by=order_by) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.query_by_pipeline_run.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'runId': self._serialize.url("run_id", run_id, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(filter_parameters, 'RunFilterParameters') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('ActivityRunsQueryResponse', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - query_by_pipeline_run.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelineruns/{runId}/queryActivityruns'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_data_flow_debug_session_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_data_flow_debug_session_operations_async.py deleted file mode 100644 index f1bf8ee8f73..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_data_flow_debug_session_operations_async.py +++ /dev/null @@ -1,551 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. 
All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Generic, List, Optional, TypeVar, Union -import warnings - -from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest -from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod -from azure.mgmt.core.exceptions import ARMErrorFormat -from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling - -from ... import models - -T = TypeVar('T') -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] - -class DataFlowDebugSessionOperations: - """DataFlowDebugSessionOperations async operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = models - - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - async def _create_initial( - self, - resource_group_name: str, - factory_name: str, - compute_type: Optional[str] = None, - core_count: Optional[int] = None, - time_to_live: Optional[int] = None, - name: Optional[str] = None, - properties: Optional["models.IntegrationRuntime"] = None, - **kwargs - ) -> Optional["models.CreateDataFlowDebugSessionResponse"]: - cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.CreateDataFlowDebugSessionResponse"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - request = models.CreateDataFlowDebugSessionRequest(compute_type=compute_type, core_count=core_count, time_to_live=time_to_live, name=name, properties=properties) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self._create_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # 
type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(request, 'CreateDataFlowDebugSessionRequest') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 202]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - response_headers = {} - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize('CreateDataFlowDebugSessionResponse', pipeline_response) - - if response.status_code == 202: - response_headers['location']=self._deserialize('str', response.headers.get('location')) - - if cls: - return cls(pipeline_response, deserialized, response_headers) - - return deserialized - _create_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/createDataFlowDebugSession'} # type: ignore - - async def begin_create( - self, - resource_group_name: str, - factory_name: str, - compute_type: Optional[str] = None, - core_count: Optional[int] = None, - time_to_live: Optional[int] = None, - name: Optional[str] = None, - properties: Optional["models.IntegrationRuntime"] = None, - **kwargs - ) -> AsyncLROPoller["models.CreateDataFlowDebugSessionResponse"]: - """Creates a data flow debug session. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param compute_type: Compute type of the cluster. The value will be overwritten by the same - setting in integration runtime if provided. - :type compute_type: str - :param core_count: Core count of the cluster. The value will be overwritten by the same setting - in integration runtime if provided. - :type core_count: int - :param time_to_live: Time to live setting of the cluster in minutes. - :type time_to_live: int - :param name: The resource name. - :type name: str - :param properties: Integration runtime properties. - :type properties: ~data_factory_management_client.models.IntegrationRuntime - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
- :return: An instance of AsyncLROPoller that returns either CreateDataFlowDebugSessionResponse or the result of cls(response) - :rtype: ~azure.core.polling.AsyncLROPoller[~data_factory_management_client.models.CreateDataFlowDebugSessionResponse] - :raises ~azure.core.exceptions.HttpResponseError: - """ - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["models.CreateDataFlowDebugSessionResponse"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] - if cont_token is None: - raw_result = await self._create_initial( - resource_group_name=resource_group_name, - factory_name=factory_name, - compute_type=compute_type, - core_count=core_count, - time_to_live=time_to_live, - name=name, - properties=properties, - cls=lambda x,y,z: x, - **kwargs - ) - - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) - - def get_long_running_output(pipeline_response): - deserialized = self._deserialize('CreateDataFlowDebugSessionResponse', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - return deserialized - - if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling - if cont_token: - return AsyncLROPoller.from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output - ) - else: - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/createDataFlowDebugSession'} # type: ignore - - def query_by_factory( - self, - resource_group_name: str, - factory_name: str, - **kwargs - ) -> AsyncIterable["models.QueryDataFlowDebugSessionsResponse"]: - """Query all active data flow debug sessions. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. 
- :type factory_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either QueryDataFlowDebugSessionsResponse or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_factory_management_client.models.QueryDataFlowDebugSessionsResponse] - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.QueryDataFlowDebugSessionsResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - if not next_link: - # Construct URL - url = self.query_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.post(url, query_parameters, header_parameters) - else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) - return request - - async def extract_data(pipeline_response): - deserialized = self._deserialize('QueryDataFlowDebugSessionsResponse', pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) - return deserialized.next_link or None, AsyncList(list_of_elem) - - async def get_next(next_link=None): - request = prepare_request(next_link) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return AsyncItemPaged( - get_next, extract_data - ) - query_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/queryDataFlowDebugSessions'} # type: ignore - - async def add_data_flow( - self, - resource_group_name: str, - factory_name: str, - session_id: Optional[str] = None, - datasets: Optional[List["models.DatasetDebugResource"]] = None, - linked_services: Optional[List["models.LinkedServiceDebugResource"]] = None, - source_settings: Optional[List["models.DataFlowSourceSetting"]] = None, - parameters: Optional[Dict[str, object]] = None, - dataset_parameters: Optional[object] = None, - folder_path: Optional[object] = None, - reference_name: Optional[str] = None, - name: Optional[str] = None, - properties: Optional["models.DataFlow"] = None, - **kwargs - ) -> "models.AddDataFlowToDebugSessionResponse": - """Add a data flow into debug session. 
- - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param session_id: The ID of data flow debug session. - :type session_id: str - :param datasets: List of datasets. - :type datasets: list[~data_factory_management_client.models.DatasetDebugResource] - :param linked_services: List of linked services. - :type linked_services: list[~data_factory_management_client.models.LinkedServiceDebugResource] - :param source_settings: Source setting for data flow debug. - :type source_settings: list[~data_factory_management_client.models.DataFlowSourceSetting] - :param parameters: Data flow parameters. - :type parameters: dict[str, object] - :param dataset_parameters: Parameters for dataset. - :type dataset_parameters: object - :param folder_path: Folder path for staging blob. Type: string (or Expression with resultType - string). - :type folder_path: object - :param reference_name: Reference LinkedService name. - :type reference_name: str - :param name: The resource name. - :type name: str - :param properties: Data flow properties. - :type properties: ~data_factory_management_client.models.DataFlow - :keyword callable cls: A custom type or function that will be passed the direct response - :return: AddDataFlowToDebugSessionResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.AddDataFlowToDebugSessionResponse - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.AddDataFlowToDebugSessionResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - request = models.DataFlowDebugPackage(session_id=session_id, datasets=datasets, linked_services=linked_services, source_settings=source_settings, parameters_debug_settings_parameters=parameters, dataset_parameters=dataset_parameters, folder_path=folder_path, reference_name=reference_name, name=name, properties=properties) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.add_data_flow.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(request, 'DataFlowDebugPackage') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not 
in [200]:
-            map_error(status_code=response.status_code, response=response, error_map=error_map)
-            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
-        deserialized = self._deserialize('AddDataFlowToDebugSessionResponse', pipeline_response)
-
-        if cls:
-            return cls(pipeline_response, deserialized, {})
-
-        return deserialized
-    add_data_flow.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/addDataFlowToDebugSession'}  # type: ignore
-
-    async def delete(
-        self,
-        resource_group_name: str,
-        factory_name: str,
-        session_id: Optional[str] = None,
-        **kwargs
-    ) -> None:
-        """Deletes a data flow debug session.
-
-        :param resource_group_name: The resource group name.
-        :type resource_group_name: str
-        :param factory_name: The factory name.
-        :type factory_name: str
-        :param session_id: The ID of data flow debug session.
-        :type session_id: str
-        :keyword callable cls: A custom type or function that will be passed the direct response
-        :return: None, or the result of cls(response)
-        :rtype: None
-        :raises: ~azure.core.exceptions.HttpResponseError
-        """
-        cls = kwargs.pop('cls', None)  # type: ClsType[None]
-        error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
-        error_map.update(kwargs.pop('error_map', {}))
-
-        request = models.DeleteDataFlowDebugSessionRequest(session_id=session_id)
-        api_version = "2018-06-01"
-        content_type = kwargs.pop("content_type", "application/json")
-
-        # Construct URL
-        url = self.delete.metadata['url']  # type: ignore
-        path_format_arguments = {
-            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
-            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
-            'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
-        }
-        url = self._client.format_url(url, **path_format_arguments)
-
-        # Construct parameters
-        query_parameters = {}  # type: Dict[str, Any]
-        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
-        # Construct headers
-        header_parameters = {}  # type: Dict[str, Any]
-        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
-
-        body_content_kwargs = {}  # type: Dict[str, Any]
-        body_content = self._serialize.body(request, 'DeleteDataFlowDebugSessionRequest')
-        body_content_kwargs['content'] = body_content
-        request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
-
-        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
-        response = pipeline_response.http_response
-
-        if response.status_code not in [200]:
-            map_error(status_code=response.status_code, response=response, error_map=error_map)
-            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
-        if cls:
-            return cls(pipeline_response, None, {})
-
-    delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/deleteDataFlowDebugSession'}  # type: ignore
-
-    async def _execute_command_initial(
-        self,
-        resource_group_name: str,
-        factory_name: str,
-        session_id: Optional[str] = None,
-        command: Optional[Union[str, "models.DataFlowDebugCommandType"]] = None,
-        command_payload: Optional["models.DataFlowDebugCommandPayload"] = None,
-        **kwargs
-    ) -> Optional["models.DataFlowDebugCommandResponse"]:
-        cls = kwargs.pop('cls', None)  # type: ClsType[Optional["models.DataFlowDebugCommandResponse"]]
-        error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
-        error_map.update(kwargs.pop('error_map', {}))
-
-        request = models.DataFlowDebugCommandRequest(session_id=session_id, command=command, command_payload=command_payload)
-        api_version = "2018-06-01"
-        content_type = kwargs.pop("content_type", "application/json")
-
-        # Construct URL
-        url = self._execute_command_initial.metadata['url']  # type: ignore
-        path_format_arguments = {
-            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
-            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
-            'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
-        }
-        url = self._client.format_url(url, **path_format_arguments)
-
-        # Construct parameters
-        query_parameters = {}  # type: Dict[str, Any]
-        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
-        # Construct headers
-        header_parameters = {}  # type: Dict[str, Any]
-        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
-        header_parameters['Accept'] = 'application/json'
-
-        body_content_kwargs = {}  # type: Dict[str, Any]
-        body_content = self._serialize.body(request, 'DataFlowDebugCommandRequest')
-        body_content_kwargs['content'] = body_content
-        request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
-
-        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
-        response = pipeline_response.http_response
-
-        if response.status_code not in [200, 202]:
-            map_error(status_code=response.status_code, response=response, error_map=error_map)
-            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
-        response_headers = {}
-        deserialized = None
-        if response.status_code == 200:
-            deserialized = self._deserialize('DataFlowDebugCommandResponse', pipeline_response)
-
-        if response.status_code == 202:
-            response_headers['location']=self._deserialize('str', response.headers.get('location'))
-
-        if cls:
-            return cls(pipeline_response, deserialized, response_headers)
-
-        return deserialized
-    _execute_command_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/executeDataFlowDebugCommand'}  # type: ignore
-
-    async def begin_execute_command(
-        self,
-        resource_group_name: str,
-        factory_name: str,
-        session_id: Optional[str] = None,
-        command: Optional[Union[str, "models.DataFlowDebugCommandType"]] = None,
-        command_payload: Optional["models.DataFlowDebugCommandPayload"] = None,
-        **kwargs
-    ) -> AsyncLROPoller["models.DataFlowDebugCommandResponse"]:
-        """Execute a data flow debug command.
-
-        :param resource_group_name: The resource group name.
-        :type resource_group_name: str
-        :param factory_name: The factory name.
-        :type factory_name: str
-        :param session_id: The ID of data flow debug session.
-        :type session_id: str
-        :param command: The command type.
-        :type command: str or ~data_factory_management_client.models.DataFlowDebugCommandType
-        :param command_payload: The command payload object.
-        :type command_payload: ~data_factory_management_client.models.DataFlowDebugCommandPayload
-        :keyword callable cls: A custom type or function that will be passed the direct response
-        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
-        :keyword polling: True for ARMPolling, False for no polling, or a
-         polling object for personal polling strategy
-        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
-        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
-        :return: An instance of AsyncLROPoller that returns either DataFlowDebugCommandResponse or the result of cls(response)
-        :rtype: ~azure.core.polling.AsyncLROPoller[~data_factory_management_client.models.DataFlowDebugCommandResponse]
-        :raises ~azure.core.exceptions.HttpResponseError:
-        """
-        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
-        cls = kwargs.pop('cls', None)  # type: ClsType["models.DataFlowDebugCommandResponse"]
-        lro_delay = kwargs.pop(
-            'polling_interval',
-            self._config.polling_interval
-        )
-        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
-        if cont_token is None:
-            raw_result = await self._execute_command_initial(
-                resource_group_name=resource_group_name,
-                factory_name=factory_name,
-                session_id=session_id,
-                command=command,
-                command_payload=command_payload,
-                cls=lambda x,y,z: x,
-                **kwargs
-            )
-
-        kwargs.pop('error_map', None)
-        kwargs.pop('content_type', None)
-
-        def get_long_running_output(pipeline_response):
-            deserialized = self._deserialize('DataFlowDebugCommandResponse', pipeline_response)
-
-            if cls:
-                return cls(pipeline_response, deserialized, {})
-            return deserialized
-
-        if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
-        elif polling is False: polling_method = AsyncNoPolling()
-        else: polling_method = polling
-        if cont_token:
-            return AsyncLROPoller.from_continuation_token(
-                polling_method=polling_method,
-                continuation_token=cont_token,
-                client=self._client,
-                deserialization_callback=get_long_running_output
-            )
-        else:
-            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
-    begin_execute_command.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/executeDataFlowDebugCommand'}  # type: ignore
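The executeDataFlowDebugCommand pair above is the standard azure-core long-running-operation split: _execute_command_initial issues the POST (200 or 202) and begin_execute_command wraps it in an AsyncLROPoller. A minimal caller sketch, assuming the vendored aio client exposes this group as data_flow_debug_session; the import path, attribute name, and command value are illustrative:

    import asyncio

    from azure.identity.aio import DefaultAzureCredential
    # Illustrative import path for the vendored aio client.
    from azext_datafactory.vendored_sdks.datafactory.aio import DataFactoryManagementClient

    async def run_debug_command():
        async with DefaultAzureCredential() as credential:
            async with DataFactoryManagementClient(credential, "<subscription-id>") as client:
                # Awaiting begin_execute_command sends the initial POST and returns an AsyncLROPoller.
                poller = await client.data_flow_debug_session.begin_execute_command(
                    resource_group_name="exampleResourceGroup",
                    factory_name="exampleFactoryName",
                    session_id="<session-id>",
                    command="executePreviewQuery",  # illustrative DataFlowDebugCommandType value
                )
                result = await poller.result()  # resolves once the 202 location polling completes
                print(result.status)

    asyncio.run(run_debug_command())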
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_data_flow_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_data_flow_operations_async.py
deleted file mode 100644
index b5c2e5656ce..00000000000
--- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_data_flow_operations_async.py
+++ /dev/null
@@ -1,309 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is regenerated.
-# --------------------------------------------------------------------------
-from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
-import warnings
-
-from azure.core.async_paging import AsyncItemPaged, AsyncList
-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
-from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
-from azure.mgmt.core.exceptions import ARMErrorFormat
-
-from ... import models
-
-T = TypeVar('T')
-ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
-
-class DataFlowOperations:
-    """DataFlowOperations async operations.
-
-    You should not instantiate this class directly. Instead, you should create a Client instance that
-    instantiates it for you and attaches it as an attribute.
-
-    :ivar models: Alias to model classes used in this operation group.
-    :type models: ~data_factory_management_client.models
-    :param client: Client for service requests.
-    :param config: Configuration of service client.
-    :param serializer: An object model serializer.
-    :param deserializer: An object model deserializer.
-    """
-
-    models = models
-
-    def __init__(self, client, config, serializer, deserializer) -> None:
-        self._client = client
-        self._serialize = serializer
-        self._deserialize = deserializer
-        self._config = config
-
-    async def create_or_update(
-        self,
-        resource_group_name: str,
-        factory_name: str,
-        data_flow_name: str,
-        properties: "models.DataFlow",
-        if_match: Optional[str] = None,
-        **kwargs
-    ) -> "models.DataFlowResource":
-        """Creates or updates a data flow.
-
-        :param resource_group_name: The resource group name.
-        :type resource_group_name: str
-        :param factory_name: The factory name.
-        :type factory_name: str
-        :param data_flow_name: The data flow name.
-        :type data_flow_name: str
-        :param properties: Data flow properties.
-        :type properties: ~data_factory_management_client.models.DataFlow
-        :param if_match: ETag of the data flow entity. Should only be specified for update, for which
-         it should match existing entity or can be * for unconditional update.
-        :type if_match: str
-        :keyword callable cls: A custom type or function that will be passed the direct response
-        :return: DataFlowResource, or the result of cls(response)
-        :rtype: ~data_factory_management_client.models.DataFlowResource
-        :raises: ~azure.core.exceptions.HttpResponseError
-        """
-        cls = kwargs.pop('cls', None)  # type: ClsType["models.DataFlowResource"]
-        error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
-        error_map.update(kwargs.pop('error_map', {}))
-
-        data_flow = models.DataFlowResource(properties=properties)
-        api_version = "2018-06-01"
-        content_type = kwargs.pop("content_type", "application/json")
-
-        # Construct URL
-        url = self.create_or_update.metadata['url']  # type: ignore
-        path_format_arguments = {
-            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
-            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
-            'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
-            'dataFlowName': self._serialize.url("data_flow_name", data_flow_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
-        }
-        url = self._client.format_url(url, **path_format_arguments)
-
-        # Construct parameters
-        query_parameters = {}  # type: Dict[str, Any]
-        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
-        # Construct headers
-        header_parameters = {}  # type: Dict[str, Any]
-        if if_match is not None:
-            header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
-        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
-        header_parameters['Accept'] = 'application/json'
-
-        body_content_kwargs = {}  # type: Dict[str, Any]
-        body_content = self._serialize.body(data_flow, 'DataFlowResource')
-        body_content_kwargs['content'] = body_content
-        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
-
-        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
-        response = pipeline_response.http_response
-
-        if response.status_code not in [200]:
-            map_error(status_code=response.status_code, response=response, error_map=error_map)
-            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
-        deserialized = self._deserialize('DataFlowResource', pipeline_response)
-
-        if cls:
-            return cls(pipeline_response, deserialized, {})
-
-        return deserialized
-    create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/dataflows/{dataFlowName}'}  # type: ignore
-
-    async def get(
-        self,
-        resource_group_name: str,
-        factory_name: str,
-        data_flow_name: str,
-        if_none_match: Optional[str] = None,
-        **kwargs
-    ) -> "models.DataFlowResource":
-        """Gets a data flow.
-
-        :param resource_group_name: The resource group name.
-        :type resource_group_name: str
-        :param factory_name: The factory name.
-        :type factory_name: str
-        :param data_flow_name: The data flow name.
-        :type data_flow_name: str
-        :param if_none_match: ETag of the data flow entity. Should only be specified for get. If the
-         ETag matches the existing entity tag, or if * was provided, then no content will be returned.
-        :type if_none_match: str
-        :keyword callable cls: A custom type or function that will be passed the direct response
-        :return: DataFlowResource, or the result of cls(response)
-        :rtype: ~data_factory_management_client.models.DataFlowResource
-        :raises: ~azure.core.exceptions.HttpResponseError
-        """
-        cls = kwargs.pop('cls', None)  # type: ClsType["models.DataFlowResource"]
-        error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
-        error_map.update(kwargs.pop('error_map', {}))
-        api_version = "2018-06-01"
-
-        # Construct URL
-        url = self.get.metadata['url']  # type: ignore
-        path_format_arguments = {
-            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
-            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
-            'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
-            'dataFlowName': self._serialize.url("data_flow_name", data_flow_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
-        }
-        url = self._client.format_url(url, **path_format_arguments)
-
-        # Construct parameters
-        query_parameters = {}  # type: Dict[str, Any]
-        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
-        # Construct headers
-        header_parameters = {}  # type: Dict[str, Any]
-        if if_none_match is not None:
-            header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str')
-        header_parameters['Accept'] = 'application/json'
-
-        request = self._client.get(url, query_parameters, header_parameters)
-        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
-        response = pipeline_response.http_response
-
-        if response.status_code not in [200]:
-            map_error(status_code=response.status_code, response=response, error_map=error_map)
-            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
-        deserialized = self._deserialize('DataFlowResource', pipeline_response)
-
-        if cls:
-            return cls(pipeline_response, deserialized, {})
-
-        return deserialized
-    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/dataflows/{dataFlowName}'}  # type: ignore
-
-    async def delete(
-        self,
-        resource_group_name: str,
-        factory_name: str,
-        data_flow_name: str,
-        **kwargs
-    ) -> None:
-        """Deletes a data flow.
-
-        :param resource_group_name: The resource group name.
-        :type resource_group_name: str
-        :param factory_name: The factory name.
-        :type factory_name: str
-        :param data_flow_name: The data flow name.
-        :type data_flow_name: str
-        :keyword callable cls: A custom type or function that will be passed the direct response
-        :return: None, or the result of cls(response)
-        :rtype: None
-        :raises: ~azure.core.exceptions.HttpResponseError
-        """
-        cls = kwargs.pop('cls', None)  # type: ClsType[None]
-        error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
-        error_map.update(kwargs.pop('error_map', {}))
-        api_version = "2018-06-01"
-
-        # Construct URL
-        url = self.delete.metadata['url']  # type: ignore
-        path_format_arguments = {
-            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
-            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
-            'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
-            'dataFlowName': self._serialize.url("data_flow_name", data_flow_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
-        }
-        url = self._client.format_url(url, **path_format_arguments)
-
-        # Construct parameters
-        query_parameters = {}  # type: Dict[str, Any]
-        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
-        # Construct headers
-        header_parameters = {}  # type: Dict[str, Any]
-
-        request = self._client.delete(url, query_parameters, header_parameters)
-        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
-        response = pipeline_response.http_response
-
-        if response.status_code not in [200, 204]:
-            map_error(status_code=response.status_code, response=response, error_map=error_map)
-            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
-        if cls:
-            return cls(pipeline_response, None, {})
-
-    delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/dataflows/{dataFlowName}'}  # type: ignore
-
-    def list_by_factory(
-        self,
-        resource_group_name: str,
-        factory_name: str,
-        **kwargs
-    ) -> AsyncIterable["models.DataFlowListResponse"]:
-        """Lists data flows.
-
-        :param resource_group_name: The resource group name.
-        :type resource_group_name: str
-        :param factory_name: The factory name.
-        :type factory_name: str
-        :keyword callable cls: A custom type or function that will be passed the direct response
-        :return: An iterator like instance of either DataFlowListResponse or the result of cls(response)
-        :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_factory_management_client.models.DataFlowListResponse]
-        :raises: ~azure.core.exceptions.HttpResponseError
-        """
-        cls = kwargs.pop('cls', None)  # type: ClsType["models.DataFlowListResponse"]
-        error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
-        error_map.update(kwargs.pop('error_map', {}))
-        api_version = "2018-06-01"
-
-        def prepare_request(next_link=None):
-            # Construct headers
-            header_parameters = {}  # type: Dict[str, Any]
-            header_parameters['Accept'] = 'application/json'
-
-            if not next_link:
-                # Construct URL
-                url = self.list_by_factory.metadata['url']  # type: ignore
-                path_format_arguments = {
-                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
-                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
-                    'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
-                }
-                url = self._client.format_url(url, **path_format_arguments)
-                # Construct parameters
-                query_parameters = {}  # type: Dict[str, Any]
-                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
-                request = self._client.get(url, query_parameters, header_parameters)
-            else:
-                url = next_link
-                query_parameters = {}  # type: Dict[str, Any]
-                request = self._client.get(url, query_parameters, header_parameters)
-            return request
-
-        async def extract_data(pipeline_response):
-            deserialized = self._deserialize('DataFlowListResponse', pipeline_response)
-            list_of_elem = deserialized.value
-            if cls:
-                list_of_elem = cls(list_of_elem)
-            return deserialized.next_link or None, AsyncList(list_of_elem)
-
-        async def get_next(next_link=None):
-            request = prepare_request(next_link)
-
-            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
-            response = pipeline_response.http_response
-
-            if response.status_code not in [200]:
-                map_error(status_code=response.status_code, response=response, error_map=error_map)
-                raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
-            return pipeline_response
-
-        return AsyncItemPaged(
-            get_next, extract_data
-        )
-    list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/dataflows'}  # type: ignore
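Each list_by_factory in these deleted files follows the same AsyncItemPaged pattern: prepare_request builds either the first request or the next_link request, and extract_data yields one page plus the continuation link. Consumption is a plain async for loop; a sketch, where the data_flows attribute name on the client is an assumption:

    async def show_data_flows(client):
        # list_by_factory is not awaited: it returns an AsyncItemPaged
        # whose pages are fetched lazily as the loop advances.
        async for data_flow in client.data_flows.list_by_factory(
            resource_group_name="exampleResourceGroup",
            factory_name="exampleFactoryName",
        ):
            print(data_flow.name, data_flow.etag)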
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_dataset_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_dataset_operations_async.py
deleted file mode 100644
index a8be0369365..00000000000
--- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_dataset_operations_async.py
+++ /dev/null
@@ -1,311 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is regenerated.
-# --------------------------------------------------------------------------
-from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
-import warnings
-
-from azure.core.async_paging import AsyncItemPaged, AsyncList
-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
-from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
-from azure.mgmt.core.exceptions import ARMErrorFormat
-
-from ... import models
-
-T = TypeVar('T')
-ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
-
-class DatasetOperations:
-    """DatasetOperations async operations.
-
-    You should not instantiate this class directly. Instead, you should create a Client instance that
-    instantiates it for you and attaches it as an attribute.
-
-    :ivar models: Alias to model classes used in this operation group.
-    :type models: ~data_factory_management_client.models
-    :param client: Client for service requests.
-    :param config: Configuration of service client.
-    :param serializer: An object model serializer.
-    :param deserializer: An object model deserializer.
-    """
-
-    models = models
-
-    def __init__(self, client, config, serializer, deserializer) -> None:
-        self._client = client
-        self._serialize = serializer
-        self._deserialize = deserializer
-        self._config = config
-
-    def list_by_factory(
-        self,
-        resource_group_name: str,
-        factory_name: str,
-        **kwargs
-    ) -> AsyncIterable["models.DatasetListResponse"]:
-        """Lists datasets.
-
-        :param resource_group_name: The resource group name.
-        :type resource_group_name: str
-        :param factory_name: The factory name.
-        :type factory_name: str
-        :keyword callable cls: A custom type or function that will be passed the direct response
-        :return: An iterator like instance of either DatasetListResponse or the result of cls(response)
-        :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_factory_management_client.models.DatasetListResponse]
-        :raises: ~azure.core.exceptions.HttpResponseError
-        """
-        cls = kwargs.pop('cls', None)  # type: ClsType["models.DatasetListResponse"]
-        error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
-        error_map.update(kwargs.pop('error_map', {}))
-        api_version = "2018-06-01"
-
-        def prepare_request(next_link=None):
-            # Construct headers
-            header_parameters = {}  # type: Dict[str, Any]
-            header_parameters['Accept'] = 'application/json'
-
-            if not next_link:
-                # Construct URL
-                url = self.list_by_factory.metadata['url']  # type: ignore
-                path_format_arguments = {
-                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
-                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
-                    'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
-                }
-                url = self._client.format_url(url, **path_format_arguments)
-                # Construct parameters
-                query_parameters = {}  # type: Dict[str, Any]
-                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
-                request = self._client.get(url, query_parameters, header_parameters)
-            else:
-                url = next_link
-                query_parameters = {}  # type: Dict[str, Any]
-                request = self._client.get(url, query_parameters, header_parameters)
-            return request
-
-        async def extract_data(pipeline_response):
-            deserialized = self._deserialize('DatasetListResponse', pipeline_response)
-            list_of_elem = deserialized.value
-            if cls:
-                list_of_elem = cls(list_of_elem)
-            return deserialized.next_link or None, AsyncList(list_of_elem)
-
-        async def get_next(next_link=None):
-            request = prepare_request(next_link)
-
-            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
-            response = pipeline_response.http_response
-
-            if response.status_code not in [200]:
-                map_error(status_code=response.status_code, response=response, error_map=error_map)
-                raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
-            return pipeline_response
-
-        return AsyncItemPaged(
-            get_next, extract_data
-        )
-    list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/datasets'}  # type: ignore
-
-    async def create_or_update(
-        self,
-        resource_group_name: str,
-        factory_name: str,
-        dataset_name: str,
-        properties: "models.Dataset",
-        if_match: Optional[str] = None,
-        **kwargs
-    ) -> "models.DatasetResource":
-        """Creates or updates a dataset.
-
-        :param resource_group_name: The resource group name.
-        :type resource_group_name: str
-        :param factory_name: The factory name.
-        :type factory_name: str
-        :param dataset_name: The dataset name.
-        :type dataset_name: str
-        :param properties: Dataset properties.
-        :type properties: ~data_factory_management_client.models.Dataset
-        :param if_match: ETag of the dataset entity. Should only be specified for update, for which it
-         should match existing entity or can be * for unconditional update.
-        :type if_match: str
-        :keyword callable cls: A custom type or function that will be passed the direct response
-        :return: DatasetResource, or the result of cls(response)
-        :rtype: ~data_factory_management_client.models.DatasetResource
-        :raises: ~azure.core.exceptions.HttpResponseError
-        """
-        cls = kwargs.pop('cls', None)  # type: ClsType["models.DatasetResource"]
-        error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
-        error_map.update(kwargs.pop('error_map', {}))
-
-        dataset = models.DatasetResource(properties=properties)
-        api_version = "2018-06-01"
-        content_type = kwargs.pop("content_type", "application/json")
-
-        # Construct URL
-        url = self.create_or_update.metadata['url']  # type: ignore
-        path_format_arguments = {
-            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
-            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
-            'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
-            'datasetName': self._serialize.url("dataset_name", dataset_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
-        }
-        url = self._client.format_url(url, **path_format_arguments)
-
-        # Construct parameters
-        query_parameters = {}  # type: Dict[str, Any]
-        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
-        # Construct headers
-        header_parameters = {}  # type: Dict[str, Any]
-        if if_match is not None:
-            header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
-        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
-        header_parameters['Accept'] = 'application/json'
-
-        body_content_kwargs = {}  # type: Dict[str, Any]
-        body_content = self._serialize.body(dataset, 'DatasetResource')
-        body_content_kwargs['content'] = body_content
-        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
-
-        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
-        response = pipeline_response.http_response
-
-        if response.status_code not in [200]:
-            map_error(status_code=response.status_code, response=response, error_map=error_map)
-            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
-        deserialized = self._deserialize('DatasetResource', pipeline_response)
-
-        if cls:
-            return cls(pipeline_response, deserialized, {})
-
-        return deserialized
-    create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/datasets/{datasetName}'}  # type: ignore
-
-    async def get(
-        self,
-        resource_group_name: str,
-        factory_name: str,
-        dataset_name: str,
-        if_none_match: Optional[str] = None,
-        **kwargs
-    ) -> Optional["models.DatasetResource"]:
-        """Gets a dataset.
-
-        :param resource_group_name: The resource group name.
-        :type resource_group_name: str
-        :param factory_name: The factory name.
-        :type factory_name: str
-        :param dataset_name: The dataset name.
-        :type dataset_name: str
-        :param if_none_match: ETag of the dataset entity. Should only be specified for get. If the ETag
-         matches the existing entity tag, or if * was provided, then no content will be returned.
-        :type if_none_match: str
-        :keyword callable cls: A custom type or function that will be passed the direct response
-        :return: DatasetResource, or the result of cls(response)
-        :rtype: ~data_factory_management_client.models.DatasetResource or None
-        :raises: ~azure.core.exceptions.HttpResponseError
-        """
-        cls = kwargs.pop('cls', None)  # type: ClsType[Optional["models.DatasetResource"]]
-        error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
-        error_map.update(kwargs.pop('error_map', {}))
-        api_version = "2018-06-01"
-
-        # Construct URL
-        url = self.get.metadata['url']  # type: ignore
-        path_format_arguments = {
-            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
-            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
-            'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
-            'datasetName': self._serialize.url("dataset_name", dataset_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
-        }
-        url = self._client.format_url(url, **path_format_arguments)
-
-        # Construct parameters
-        query_parameters = {}  # type: Dict[str, Any]
-        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
-        # Construct headers
-        header_parameters = {}  # type: Dict[str, Any]
-        if if_none_match is not None:
-            header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str')
-        header_parameters['Accept'] = 'application/json'
-
-        request = self._client.get(url, query_parameters, header_parameters)
-        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
-        response = pipeline_response.http_response
-
-        if response.status_code not in [200, 304]:
-            map_error(status_code=response.status_code, response=response, error_map=error_map)
-            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
-        deserialized = None
-        if response.status_code == 200:
-            deserialized = self._deserialize('DatasetResource', pipeline_response)
-
-        if cls:
-            return cls(pipeline_response, deserialized, {})
-
-        return deserialized
-    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/datasets/{datasetName}'}  # type: ignore
-
-    async def delete(
-        self,
-        resource_group_name: str,
-        factory_name: str,
-        dataset_name: str,
-        **kwargs
-    ) -> None:
-        """Deletes a dataset.
-
-        :param resource_group_name: The resource group name.
-        :type resource_group_name: str
-        :param factory_name: The factory name.
-        :type factory_name: str
-        :param dataset_name: The dataset name.
-        :type dataset_name: str
-        :keyword callable cls: A custom type or function that will be passed the direct response
-        :return: None, or the result of cls(response)
-        :rtype: None
-        :raises: ~azure.core.exceptions.HttpResponseError
-        """
-        cls = kwargs.pop('cls', None)  # type: ClsType[None]
-        error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
-        error_map.update(kwargs.pop('error_map', {}))
-        api_version = "2018-06-01"
-
-        # Construct URL
-        url = self.delete.metadata['url']  # type: ignore
-        path_format_arguments = {
-            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
-            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
-            'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
-            'datasetName': self._serialize.url("dataset_name", dataset_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
-        }
-        url = self._client.format_url(url, **path_format_arguments)
-
-        # Construct parameters
-        query_parameters = {}  # type: Dict[str, Any]
-        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
-        # Construct headers
-        header_parameters = {}  # type: Dict[str, Any]
-
-        request = self._client.delete(url, query_parameters, header_parameters)
-        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
-        response = pipeline_response.http_response
-
-        if response.status_code not in [200, 204]:
-            map_error(status_code=response.status_code, response=response, error_map=error_map)
-            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
-        if cls:
-            return cls(pipeline_response, None, {})
-
-    delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/datasets/{datasetName}'}  # type: ignore
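The If-Match/If-None-Match parameters in this operation group implement optimistic concurrency: get with if_none_match returns None on a 304, and create_or_update with if_match fails unless the ETag still matches. A sketch of that round trip, where the datasets attribute name on the client is an assumption:

    async def refresh_dataset(client):
        # A 304 (the cached ETag is still current) deserializes to None rather than raising.
        dataset = await client.datasets.get(
            resource_group_name="exampleResourceGroup",
            factory_name="exampleFactoryName",
            dataset_name="exampleDataset",
            if_none_match='"<cached-etag>"',
        )
        if dataset is None:
            return  # cached copy is still valid
        # Echo the ETag back so the write fails if someone else updated the entity meanwhile.
        await client.datasets.create_or_update(
            resource_group_name="exampleResourceGroup",
            factory_name="exampleFactoryName",
            dataset_name="exampleDataset",
            properties=dataset.properties,
            if_match=dataset.etag,
        )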
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_exposure_control_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_exposure_control_operations_async.py
deleted file mode 100644
index b20acb1c3c8..00000000000
--- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_exposure_control_operations_async.py
+++ /dev/null
@@ -1,241 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is regenerated.
-# --------------------------------------------------------------------------
-from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar
-import warnings
-
-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
-from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
-from azure.mgmt.core.exceptions import ARMErrorFormat
-
-from ... import models
-
-T = TypeVar('T')
-ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
-
-class ExposureControlOperations:
-    """ExposureControlOperations async operations.
-
-    You should not instantiate this class directly. Instead, you should create a Client instance that
-    instantiates it for you and attaches it as an attribute.
-
-    :ivar models: Alias to model classes used in this operation group.
-    :type models: ~data_factory_management_client.models
-    :param client: Client for service requests.
-    :param config: Configuration of service client.
-    :param serializer: An object model serializer.
-    :param deserializer: An object model deserializer.
-    """
-
-    models = models
-
-    def __init__(self, client, config, serializer, deserializer) -> None:
-        self._client = client
-        self._serialize = serializer
-        self._deserialize = deserializer
-        self._config = config
-
-    async def get_feature_value(
-        self,
-        location_id: str,
-        feature_name: Optional[str] = None,
-        feature_type: Optional[str] = None,
-        **kwargs
-    ) -> "models.ExposureControlResponse":
-        """Get exposure control feature for specific location.
-
-        :param location_id: The location identifier.
-        :type location_id: str
-        :param feature_name: The feature name.
-        :type feature_name: str
-        :param feature_type: The feature type.
-        :type feature_type: str
-        :keyword callable cls: A custom type or function that will be passed the direct response
-        :return: ExposureControlResponse, or the result of cls(response)
-        :rtype: ~data_factory_management_client.models.ExposureControlResponse
-        :raises: ~azure.core.exceptions.HttpResponseError
-        """
-        cls = kwargs.pop('cls', None)  # type: ClsType["models.ExposureControlResponse"]
-        error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
-        error_map.update(kwargs.pop('error_map', {}))
-
-        exposure_control_request = models.ExposureControlRequest(feature_name=feature_name, feature_type=feature_type)
-        api_version = "2018-06-01"
-        content_type = kwargs.pop("content_type", "application/json")
-
-        # Construct URL
-        url = self.get_feature_value.metadata['url']  # type: ignore
-        path_format_arguments = {
-            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
-            'locationId': self._serialize.url("location_id", location_id, 'str'),
-        }
-        url = self._client.format_url(url, **path_format_arguments)
-
-        # Construct parameters
-        query_parameters = {}  # type: Dict[str, Any]
-        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
-        # Construct headers
-        header_parameters = {}  # type: Dict[str, Any]
-        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
-        header_parameters['Accept'] = 'application/json'
-
-        body_content_kwargs = {}  # type: Dict[str, Any]
-        body_content = self._serialize.body(exposure_control_request, 'ExposureControlRequest')
-        body_content_kwargs['content'] = body_content
-        request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
-
-        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
-        response = pipeline_response.http_response
-
-        if response.status_code not in [200]:
-            map_error(status_code=response.status_code, response=response, error_map=error_map)
-            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
-        deserialized = self._deserialize('ExposureControlResponse', pipeline_response)
-
-        if cls:
-            return cls(pipeline_response, deserialized, {})
-
-        return deserialized
-    get_feature_value.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.DataFactory/locations/{locationId}/getFeatureValue'}  # type: ignore
-
-    async def get_feature_value_by_factory(
-        self,
-        resource_group_name: str,
-        factory_name: str,
-        feature_name: Optional[str] = None,
-        feature_type: Optional[str] = None,
-        **kwargs
-    ) -> "models.ExposureControlResponse":
-        """Get exposure control feature for specific factory.
-
-        :param resource_group_name: The resource group name.
-        :type resource_group_name: str
-        :param factory_name: The factory name.
-        :type factory_name: str
-        :param feature_name: The feature name.
-        :type feature_name: str
-        :param feature_type: The feature type.
-        :type feature_type: str
-        :keyword callable cls: A custom type or function that will be passed the direct response
-        :return: ExposureControlResponse, or the result of cls(response)
-        :rtype: ~data_factory_management_client.models.ExposureControlResponse
-        :raises: ~azure.core.exceptions.HttpResponseError
-        """
-        cls = kwargs.pop('cls', None)  # type: ClsType["models.ExposureControlResponse"]
-        error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
-        error_map.update(kwargs.pop('error_map', {}))
-
-        exposure_control_request = models.ExposureControlRequest(feature_name=feature_name, feature_type=feature_type)
-        api_version = "2018-06-01"
-        content_type = kwargs.pop("content_type", "application/json")
-
-        # Construct URL
-        url = self.get_feature_value_by_factory.metadata['url']  # type: ignore
-        path_format_arguments = {
-            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
-            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
-            'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
-        }
-        url = self._client.format_url(url, **path_format_arguments)
-
-        # Construct parameters
-        query_parameters = {}  # type: Dict[str, Any]
-        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
-        # Construct headers
-        header_parameters = {}  # type: Dict[str, Any]
-        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
-        header_parameters['Accept'] = 'application/json'
-
-        body_content_kwargs = {}  # type: Dict[str, Any]
-        body_content = self._serialize.body(exposure_control_request, 'ExposureControlRequest')
-        body_content_kwargs['content'] = body_content
-        request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
-
-        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
-        response = pipeline_response.http_response
-
-        if response.status_code not in [200]:
-            map_error(status_code=response.status_code, response=response, error_map=error_map)
-            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
-        deserialized = self._deserialize('ExposureControlResponse', pipeline_response)
-
-        if cls:
-            return cls(pipeline_response, deserialized, {})
-
-        return deserialized
-    get_feature_value_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/getFeatureValue'}  # type: ignore
-
-    async def query_feature_value_by_factory(
-        self,
-        resource_group_name: str,
-        factory_name: str,
-        exposure_control_requests: List["models.ExposureControlRequest"],
-        **kwargs
-    ) -> "models.ExposureControlBatchResponse":
-        """Get list of exposure control features for specific factory.
-
-        :param resource_group_name: The resource group name.
-        :type resource_group_name: str
-        :param factory_name: The factory name.
-        :type factory_name: str
-        :param exposure_control_requests: List of exposure control features.
-        :type exposure_control_requests: list[~data_factory_management_client.models.ExposureControlRequest]
-        :keyword callable cls: A custom type or function that will be passed the direct response
-        :return: ExposureControlBatchResponse, or the result of cls(response)
-        :rtype: ~data_factory_management_client.models.ExposureControlBatchResponse
-        :raises: ~azure.core.exceptions.HttpResponseError
-        """
-        cls = kwargs.pop('cls', None)  # type: ClsType["models.ExposureControlBatchResponse"]
-        error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
-        error_map.update(kwargs.pop('error_map', {}))
-
-        exposure_control_batch_request = models.ExposureControlBatchRequest(exposure_control_requests=exposure_control_requests)
-        api_version = "2018-06-01"
-        content_type = kwargs.pop("content_type", "application/json")
-
-        # Construct URL
-        url = self.query_feature_value_by_factory.metadata['url']  # type: ignore
-        path_format_arguments = {
-            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
-            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
-            'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
-        }
-        url = self._client.format_url(url, **path_format_arguments)
-
-        # Construct parameters
-        query_parameters = {}  # type: Dict[str, Any]
-        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
-        # Construct headers
-        header_parameters = {}  # type: Dict[str, Any]
-        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
-        header_parameters['Accept'] = 'application/json'
-
-        body_content_kwargs = {}  # type: Dict[str, Any]
-        body_content = self._serialize.body(exposure_control_batch_request, 'ExposureControlBatchRequest')
-        body_content_kwargs['content'] = body_content
-        request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
-
-        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
-        response = pipeline_response.http_response
-
-        if response.status_code not in [200]:
-            map_error(status_code=response.status_code, response=response, error_map=error_map)
-            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
-        deserialized = self._deserialize('ExposureControlBatchResponse', pipeline_response)
-
-        if cls:
-            return cls(pipeline_response, deserialized, {})
-
-        return deserialized
-    query_feature_value_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/queryFeaturesValue'}  # type: ignore
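ExposureControlOperations wraps feature-flag lookups: a single feature resolved per location or per factory, or a batched query. A sketch, assuming the operation group is exposed as exposure_control; the import path and feature name are illustrative:

    from azext_datafactory.vendored_sdks.datafactory import models  # illustrative path

    async def check_features(client):
        # Single feature, resolved per location.
        single = await client.exposure_control.get_feature_value(
            location_id="WestEurope",
            feature_name="ADFIntegrationRuntimeSharingRbac",  # illustrative feature name
            feature_type="Feature",
        )
        print(single.feature_name, single.value)

        # Batched lookup, scoped to one factory.
        batch = await client.exposure_control.query_feature_value_by_factory(
            resource_group_name="exampleResourceGroup",
            factory_name="exampleFactoryName",
            exposure_control_requests=[
                models.ExposureControlRequest(
                    feature_name="ADFIntegrationRuntimeSharingRbac",
                    feature_type="Feature",
                )
            ],
        )
        for item in batch.exposure_control_responses:
            print(item.feature_name, item.value)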
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_factory_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_factory_operations_async.py
deleted file mode 100644
index 46f37c1a6f7..00000000000
--- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_factory_operations_async.py
+++ /dev/null
@@ -1,658 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is regenerated.
-# --------------------------------------------------------------------------
-from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
-import warnings
-
-from azure.core.async_paging import AsyncItemPaged, AsyncList
-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
-from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
-from azure.mgmt.core.exceptions import ARMErrorFormat
-
-from ... import models
-
-T = TypeVar('T')
-ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
-
-class FactoryOperations:
-    """FactoryOperations async operations.
-
-    You should not instantiate this class directly. Instead, you should create a Client instance that
-    instantiates it for you and attaches it as an attribute.
-
-    :ivar models: Alias to model classes used in this operation group.
-    :type models: ~data_factory_management_client.models
-    :param client: Client for service requests.
-    :param config: Configuration of service client.
-    :param serializer: An object model serializer.
-    :param deserializer: An object model deserializer.
-    """
-
-    models = models
-
-    def __init__(self, client, config, serializer, deserializer) -> None:
-        self._client = client
-        self._serialize = serializer
-        self._deserialize = deserializer
-        self._config = config
-
-    def list(
-        self,
-        **kwargs
-    ) -> AsyncIterable["models.FactoryListResponse"]:
-        """Lists factories under the specified subscription.
-
-        :keyword callable cls: A custom type or function that will be passed the direct response
-        :return: An iterator like instance of either FactoryListResponse or the result of cls(response)
-        :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_factory_management_client.models.FactoryListResponse]
-        :raises: ~azure.core.exceptions.HttpResponseError
-        """
-        cls = kwargs.pop('cls', None)  # type: ClsType["models.FactoryListResponse"]
-        error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
-        error_map.update(kwargs.pop('error_map', {}))
-        api_version = "2018-06-01"
-
-        def prepare_request(next_link=None):
-            # Construct headers
-            header_parameters = {}  # type: Dict[str, Any]
-            header_parameters['Accept'] = 'application/json'
-
-            if not next_link:
-                # Construct URL
-                url = self.list.metadata['url']  # type: ignore
-                path_format_arguments = {
-                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
-                }
-                url = self._client.format_url(url, **path_format_arguments)
-                # Construct parameters
-                query_parameters = {}  # type: Dict[str, Any]
-                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
-                request = self._client.get(url, query_parameters, header_parameters)
-            else:
-                url = next_link
-                query_parameters = {}  # type: Dict[str, Any]
-                request = self._client.get(url, query_parameters, header_parameters)
-            return request
-
-        async def extract_data(pipeline_response):
-            deserialized = self._deserialize('FactoryListResponse', pipeline_response)
-            list_of_elem = deserialized.value
-            if cls:
-                list_of_elem = cls(list_of_elem)
-            return deserialized.next_link or None, AsyncList(list_of_elem)
-
-        async def get_next(next_link=None):
-            request = prepare_request(next_link)
-
-            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
-            response = pipeline_response.http_response
-
-            if response.status_code not in [200]:
-                map_error(status_code=response.status_code, response=response, error_map=error_map)
-                raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
-            return pipeline_response
-
-        return AsyncItemPaged(
-            get_next, extract_data
-        )
-    list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.DataFactory/factories'}  # type: ignore
-
-    async def configure_factory_repo(
-        self,
-        location_id: str,
-        factory_resource_id: Optional[str] = None,
-        repo_configuration: Optional["models.FactoryRepoConfiguration"] = None,
-        **kwargs
-    ) -> "models.Factory":
-        """Updates a factory's repo information.
-
-        :param location_id: The location identifier.
-        :type location_id: str
-        :param factory_resource_id: The factory resource id.
-        :type factory_resource_id: str
-        :param repo_configuration: Git repo information of the factory.
-        :type repo_configuration: ~data_factory_management_client.models.FactoryRepoConfiguration
-        :keyword callable cls: A custom type or function that will be passed the direct response
-        :return: Factory, or the result of cls(response)
-        :rtype: ~data_factory_management_client.models.Factory
-        :raises: ~azure.core.exceptions.HttpResponseError
-        """
-        cls = kwargs.pop('cls', None)  # type: ClsType["models.Factory"]
-        error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
-        error_map.update(kwargs.pop('error_map', {}))
-
-        factory_repo_update = models.FactoryRepoUpdate(factory_resource_id=factory_resource_id, repo_configuration=repo_configuration)
-        api_version = "2018-06-01"
-        content_type = kwargs.pop("content_type", "application/json")
-
-        # Construct URL
-        url = self.configure_factory_repo.metadata['url']  # type: ignore
-        path_format_arguments = {
-            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
-            'locationId': self._serialize.url("location_id", location_id, 'str'),
-        }
-        url = self._client.format_url(url, **path_format_arguments)
-
-        # Construct parameters
-        query_parameters = {}  # type: Dict[str, Any]
-        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
-        # Construct headers
-        header_parameters = {}  # type: Dict[str, Any]
-        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
-        header_parameters['Accept'] = 'application/json'
-
-        body_content_kwargs = {}  # type: Dict[str, Any]
-        body_content = self._serialize.body(factory_repo_update, 'FactoryRepoUpdate')
-        body_content_kwargs['content'] = body_content
-        request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
-
-        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
-        response = pipeline_response.http_response
-
-        if response.status_code not in [200]:
-            map_error(status_code=response.status_code, response=response, error_map=error_map)
-            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
-        deserialized = self._deserialize('Factory', pipeline_response)
-
-        if cls:
-            return cls(pipeline_response, deserialized, {})
-
-        return deserialized
-    configure_factory_repo.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.DataFactory/locations/{locationId}/configureFactoryRepo'}  # type: ignore
-
-    def list_by_resource_group(
-        self,
-        resource_group_name: str,
-        **kwargs
-    ) -> AsyncIterable["models.FactoryListResponse"]:
-        """Lists factories.
-
-        :param resource_group_name: The resource group name.
-        :type resource_group_name: str
-        :keyword callable cls: A custom type or function that will be passed the direct response
-        :return: An iterator like instance of either FactoryListResponse or the result of cls(response)
-        :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_factory_management_client.models.FactoryListResponse]
-        :raises: ~azure.core.exceptions.HttpResponseError
-        """
-        cls = kwargs.pop('cls', None)  # type: ClsType["models.FactoryListResponse"]
-        error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
-        error_map.update(kwargs.pop('error_map', {}))
-        api_version = "2018-06-01"
-
-        def prepare_request(next_link=None):
-            # Construct headers
-            header_parameters = {}  # type: Dict[str, Any]
-            header_parameters['Accept'] = 'application/json'
-
-            if not next_link:
-                # Construct URL
-                url = self.list_by_resource_group.metadata['url']  # type: ignore
-                path_format_arguments = {
-                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
-                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
-                }
-                url = self._client.format_url(url, **path_format_arguments)
-                # Construct parameters
-                query_parameters = {}  # type: Dict[str, Any]
-                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
-                request = self._client.get(url, query_parameters, header_parameters)
-            else:
-                url = next_link
-                query_parameters = {}  # type: Dict[str, Any]
-                request = self._client.get(url, query_parameters, header_parameters)
-            return request
-
-        async def extract_data(pipeline_response):
-            deserialized = self._deserialize('FactoryListResponse', pipeline_response)
-            list_of_elem = deserialized.value
-            if cls:
-                list_of_elem = cls(list_of_elem)
-            return deserialized.next_link or None, AsyncList(list_of_elem)
-
-        async def get_next(next_link=None):
-            request = prepare_request(next_link)
-
-            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
-            response = pipeline_response.http_response
-
-            if response.status_code not in [200]:
-                map_error(status_code=response.status_code, response=response, error_map=error_map)
-                raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
-            return pipeline_response
-
-        return AsyncItemPaged(
-            get_next, extract_data
-        )
-    list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories'}  # type: ignore
-
-    async def create_or_update(
-        self,
-        resource_group_name: str,
-        factory_name: str,
-        if_match: Optional[str] = None,
-        location: Optional[str] = None,
-        tags: Optional[Dict[str, str]] = None,
-        identity: Optional["models.FactoryIdentity"] = None,
-        repo_configuration: Optional["models.FactoryRepoConfiguration"] = None,
-        global_parameters: Optional[Dict[str, "models.GlobalParameterSpecification"]] = None,
-        **kwargs
-    ) -> "models.Factory":
-        """Creates or updates a factory.
-
-        :param resource_group_name: The resource group name.
-        :type resource_group_name: str
-        :param factory_name: The factory name.
-        :type factory_name: str
-        :param if_match: ETag of the factory entity. Should only be specified for update, for which it
-         should match existing entity or can be * for unconditional update.
-        :type if_match: str
-        :param location: The resource location.
-        :type location: str
-        :param tags: The resource tags.
-        :type tags: dict[str, str]
-        :param identity: Managed service identity of the factory.
-        :type identity: ~data_factory_management_client.models.FactoryIdentity
-        :param repo_configuration: Git repo information of the factory.
-        :type repo_configuration: ~data_factory_management_client.models.FactoryRepoConfiguration
-        :param global_parameters: List of parameters for factory.
-        :type global_parameters: dict[str, ~data_factory_management_client.models.GlobalParameterSpecification]
-        :keyword callable cls: A custom type or function that will be passed the direct response
-        :return: Factory, or the result of cls(response)
-        :rtype: ~data_factory_management_client.models.Factory
-        :raises: ~azure.core.exceptions.HttpResponseError
-        """
-        cls = kwargs.pop('cls', None)  # type: ClsType["models.Factory"]
-        error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
-        error_map.update(kwargs.pop('error_map', {}))
-
-        factory = models.Factory(location=location, tags=tags, identity=identity, repo_configuration=repo_configuration, global_parameters=global_parameters)
-        api_version = "2018-06-01"
-        content_type = kwargs.pop("content_type", "application/json")
-
-        # Construct URL
-        url = self.create_or_update.metadata['url']  # type: ignore
-        path_format_arguments = {
-            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
-            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
-            'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
-        }
-        url = self._client.format_url(url, **path_format_arguments)
-
-        # Construct parameters
-        query_parameters = {}  # type: Dict[str, Any]
-        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
-        # Construct headers
-        header_parameters = {}  # type: Dict[str, Any]
-        if if_match is not None:
-            header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
-        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
-        header_parameters['Accept'] = 'application/json'
-
-        body_content_kwargs = {}  # type: Dict[str, Any]
-        body_content = self._serialize.body(factory, 'Factory')
-        body_content_kwargs['content'] = body_content
-        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
-
-        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
-        response = pipeline_response.http_response
-
-        if response.status_code not in [200]:
-            map_error(status_code=response.status_code, response=response, error_map=error_map)
-            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
-        deserialized = self._deserialize('Factory', pipeline_response)
-
-        if cls:
-            return cls(pipeline_response, deserialized, {})
-
-        return deserialized
-    create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}'}  # type: ignore
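Note the asymmetry between create_or_update (a full Factory body sent via PUT) and the update method that follows (a PATCH limited to tags and identity). A usage sketch, where the factories attribute name on the client is an assumption:

    async def ensure_factory(client):
        factory = await client.factories.create_or_update(
            resource_group_name="exampleResourceGroup",
            factory_name="exampleFactoryName",
            location="East US",
        )
        # PATCH path: only tags and identity can change here; anything
        # else goes back through create_or_update.
        factory = await client.factories.update(
            resource_group_name="exampleResourceGroup",
            factory_name="exampleFactoryName",
            tags={"environment": "test"},
        )
        print(factory.provisioning_state)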
- :type factory_name: str - :param tags: The resource tags. - :type tags: dict[str, str] - :param identity: Managed service identity of the factory. - :type identity: ~data_factory_management_client.models.FactoryIdentity - :keyword callable cls: A custom type or function that will be passed the direct response - :return: Factory, or the result of cls(response) - :rtype: ~data_factory_management_client.models.Factory - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Factory"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - factory_update_parameters = models.FactoryUpdateParameters(tags=tags, identity=identity) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(factory_update_parameters, 'FactoryUpdateParameters') - body_content_kwargs['content'] = body_content - request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('Factory', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}'} # type: ignore - - async def get( - self, - resource_group_name: str, - factory_name: str, - if_none_match: Optional[str] = None, - **kwargs - ) -> Optional["models.Factory"]: - """Gets a factory. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param if_none_match: ETag of the factory entity. Should only be specified for get. If the ETag - matches the existing entity tag, or if * was provided, then no content will be returned. 
- :type if_none_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: Factory, or the result of cls(response) - :rtype: ~data_factory_management_client.models.Factory or None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.Factory"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 304]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize('Factory', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}'} # type: ignore - - async def delete( - self, - resource_group_name: str, - factory_name: str, - **kwargs - ) -> None: - """Deletes a factory. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. 
- :type factory_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.delete(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 204]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}'} # type: ignore - - async def get_git_hub_access_token( - self, - resource_group_name: str, - factory_name: str, - git_hub_access_code: str, - git_hub_access_token_base_url: str, - git_hub_client_id: Optional[str] = None, - **kwargs - ) -> "models.GitHubAccessTokenResponse": - """Get GitHub Access Token. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param git_hub_access_code: GitHub access code. - :type git_hub_access_code: str - :param git_hub_access_token_base_url: GitHub access token base URL. - :type git_hub_access_token_base_url: str - :param git_hub_client_id: GitHub application client ID. 
- :type git_hub_client_id: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: GitHubAccessTokenResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.GitHubAccessTokenResponse - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.GitHubAccessTokenResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - git_hub_access_token_request = models.GitHubAccessTokenRequest(git_hub_access_code=git_hub_access_code, git_hub_client_id=git_hub_client_id, git_hub_access_token_base_url=git_hub_access_token_base_url) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.get_git_hub_access_token.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(git_hub_access_token_request, 'GitHubAccessTokenRequest') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('GitHubAccessTokenResponse', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get_git_hub_access_token.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/getGitHubAccessToken'} # type: ignore - - async def get_data_plane_access( - self, - resource_group_name: str, - factory_name: str, - permissions: Optional[str] = None, - access_resource_path: Optional[str] = None, - profile_name: Optional[str] = None, - start_time: Optional[str] = None, - expire_time: Optional[str] = None, - **kwargs - ) -> "models.AccessPolicyResponse": - """Get Data Plane access. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param permissions: The string with permissions for Data Plane access. Currently only 'r' is - supported which grants read only access. 
- :type permissions: str
- :param access_resource_path: The resource path to get access to, relative to the factory.
- Currently only the empty string is supported, which corresponds to the factory resource.
- :type access_resource_path: str
- :param profile_name: The name of the profile. Currently only the default is supported. The
- default value is DefaultProfile.
- :type profile_name: str
- :param start_time: Start time for the token. If not specified, the current time will be used.
- :type start_time: str
- :param expire_time: Expiration time for the token. The maximum duration for the token is eight
- hours; by default the token expires in eight hours.
- :type expire_time: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: AccessPolicyResponse, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.AccessPolicyResponse
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.AccessPolicyResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- policy = models.UserAccessPolicy(permissions=permissions, access_resource_path=access_resource_path, profile_name=profile_name, start_time=start_time, expire_time=expire_time)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.get_data_plane_access.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(policy, 'UserAccessPolicy')
- body_content_kwargs['content'] = body_content
- request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('AccessPolicyResponse', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- get_data_plane_access.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/getDataPlaneAccess'} # type: ignore
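get_data_plane_access, by contrast, is a single request-reply POST that mints a short-lived data-plane token. A hedged sketch of calling it with the client from the earlier sketch (the factories attribute and the access_token field are assumptions about the vendored client and models):

    # Request a read-only token scoped to the whole factory; reuses 'client'
    # from the earlier sketch.
    policy = await client.factories.get_data_plane_access(
        resource_group_name="exampleResourceGroup",
        factory_name="exampleFactoryName",
        permissions="r",          # only 'r' (read-only) is supported
        access_resource_path="",  # empty string == the factory resource itself
        profile_name="DefaultProfile",
    )
    print(policy.access_token)    # valid for at most eight hours

diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_integration_runtime_node_operations_async.py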
b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_integration_runtime_node_operations_async.py deleted file mode 100644 index a6022196653..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_integration_runtime_node_operations_async.py +++ /dev/null @@ -1,301 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from typing import Any, Callable, Dict, Generic, Optional, TypeVar -import warnings - -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest -from azure.mgmt.core.exceptions import ARMErrorFormat - -from ... import models - -T = TypeVar('T') -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] - -class IntegrationRuntimeNodeOperations: - """IntegrationRuntimeNodeOperations async operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = models - - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - async def get( - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - node_name: str, - **kwargs - ) -> "models.SelfHostedIntegrationRuntimeNode": - """Gets a self-hosted integration runtime node. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. - :type integration_runtime_name: str - :param node_name: The integration runtime node name. 
- :type node_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: SelfHostedIntegrationRuntimeNode, or the result of cls(response) - :rtype: ~data_factory_management_client.models.SelfHostedIntegrationRuntimeNode - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.SelfHostedIntegrationRuntimeNode"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'nodeName': self._serialize.url("node_name", node_name, 'str', max_length=150, min_length=1, pattern=r'^[a-z0-9A-Z][a-z0-9A-Z_-]{0,149}$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('SelfHostedIntegrationRuntimeNode', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/nodes/{nodeName}'} # type: ignore - - async def delete( - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - node_name: str, - **kwargs - ) -> None: - """Deletes a self-hosted integration runtime node. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. - :type integration_runtime_name: str - :param node_name: The integration runtime node name. 
- :type node_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'nodeName': self._serialize.url("node_name", node_name, 'str', max_length=150, min_length=1, pattern=r'^[a-z0-9A-Z][a-z0-9A-Z_-]{0,149}$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.delete(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 204]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/nodes/{nodeName}'} # type: ignore - - async def update( - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - node_name: str, - concurrent_jobs_limit: Optional[int] = None, - **kwargs - ) -> "models.SelfHostedIntegrationRuntimeNode": - """Updates a self-hosted integration runtime node. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. - :type integration_runtime_name: str - :param node_name: The integration runtime node name. - :type node_name: str - :param concurrent_jobs_limit: The number of concurrent jobs permitted to run on the integration - runtime node. Values between 1 and maxConcurrentJobs(inclusive) are allowed. 
- :type concurrent_jobs_limit: int - :keyword callable cls: A custom type or function that will be passed the direct response - :return: SelfHostedIntegrationRuntimeNode, or the result of cls(response) - :rtype: ~data_factory_management_client.models.SelfHostedIntegrationRuntimeNode - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.SelfHostedIntegrationRuntimeNode"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - update_integration_runtime_node_request = models.UpdateIntegrationRuntimeNodeRequest(concurrent_jobs_limit=concurrent_jobs_limit) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'nodeName': self._serialize.url("node_name", node_name, 'str', max_length=150, min_length=1, pattern=r'^[a-z0-9A-Z][a-z0-9A-Z_-]{0,149}$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(update_integration_runtime_node_request, 'UpdateIntegrationRuntimeNodeRequest') - body_content_kwargs['content'] = body_content - request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('SelfHostedIntegrationRuntimeNode', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/nodes/{nodeName}'} # type: ignore - - async def get_ip_address( - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - node_name: str, - **kwargs - ) -> "models.IntegrationRuntimeNodeIpAddress": - """Get the IP address of self-hosted integration runtime node. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. 
- :type factory_name: str - :param integration_runtime_name: The integration runtime name. - :type integration_runtime_name: str - :param node_name: The integration runtime node name. - :type node_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: IntegrationRuntimeNodeIpAddress, or the result of cls(response) - :rtype: ~data_factory_management_client.models.IntegrationRuntimeNodeIpAddress - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeNodeIpAddress"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.get_ip_address.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'nodeName': self._serialize.url("node_name", node_name, 'str', max_length=150, min_length=1, pattern=r'^[a-z0-9A-Z][a-z0-9A-Z_-]{0,149}$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('IntegrationRuntimeNodeIpAddress', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get_ip_address.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/nodes/{nodeName}/ipAddress'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_integration_runtime_object_metadata_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_integration_runtime_object_metadata_operations_async.py deleted file mode 100644 index 70df0716c21..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_integration_runtime_object_metadata_operations_async.py +++ /dev/null @@ -1,230 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union -import warnings - -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest -from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod -from azure.mgmt.core.exceptions import ARMErrorFormat -from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling - -from ... import models - -T = TypeVar('T') -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] - -class IntegrationRuntimeObjectMetadataOperations: - """IntegrationRuntimeObjectMetadataOperations async operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = models - - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - async def _refresh_initial( - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - **kwargs - ) -> Optional["models.SsisObjectMetadataStatusResponse"]: - cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.SsisObjectMetadataStatusResponse"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self._refresh_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 202]: - 
map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = None
- if response.status_code == 200:
- deserialized = self._deserialize('SsisObjectMetadataStatusResponse', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- _refresh_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/refreshObjectMetadata'} # type: ignore
-
- async def begin_refresh(
- self,
- resource_group_name: str,
- factory_name: str,
- integration_runtime_name: str,
- **kwargs
- ) -> AsyncLROPoller["models.SsisObjectMetadataStatusResponse"]:
- """Refresh the object metadata of an SSIS integration runtime.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param integration_runtime_name: The integration runtime name.
- :type integration_runtime_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: True for AsyncARMPolling, False for no polling, or a
- polling object for a personal polling strategy
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
- :return: An instance of AsyncLROPoller that returns either SsisObjectMetadataStatusResponse or the result of cls(response)
- :rtype: ~azure.core.polling.AsyncLROPoller[~data_factory_management_client.models.SsisObjectMetadataStatusResponse]
- :raises ~azure.core.exceptions.HttpResponseError:
- """
- polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
- cls = kwargs.pop('cls', None) # type: ClsType["models.SsisObjectMetadataStatusResponse"]
- lro_delay = kwargs.pop(
- 'polling_interval',
- self._config.polling_interval
- )
- cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
- if cont_token is None:
- raw_result = await self._refresh_initial(
- resource_group_name=resource_group_name,
- factory_name=factory_name,
- integration_runtime_name=integration_runtime_name,
- cls=lambda x,y,z: x,
- **kwargs
- )
-
- kwargs.pop('error_map', None)
- kwargs.pop('content_type', None)
-
- def get_long_running_output(pipeline_response):
- deserialized = self._deserialize('SsisObjectMetadataStatusResponse', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
- return deserialized
-
- if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
- elif polling is False: polling_method = AsyncNoPolling()
- else: polling_method = polling
- if cont_token:
- return AsyncLROPoller.from_continuation_token(
- polling_method=polling_method,
- continuation_token=cont_token,
- client=self._client,
- deserialization_callback=get_long_running_output
- )
- else:
- return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
- begin_refresh.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/refreshObjectMetadata'} # type: ignore
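begin_refresh is the one long-running operation here: _refresh_initial issues the initial POST (which may return 200 or 202), and the returned AsyncLROPoller polls until the refresh reaches a terminal state. A hedged sketch of driving it (the operation-group attribute name is an assumption about the vendored client):

    # Reuses 'client' from the earlier sketch.
    poller = await client.integration_runtime_object_metadata.begin_refresh(
        resource_group_name="exampleResourceGroup",
        factory_name="exampleFactoryName",
        integration_runtime_name="exampleIntegrationRuntime",
    )
    token = poller.continuation_token()  # may be persisted to resume polling later
    status = await poller.result()       # AsyncARMPolling waits for the terminal state
    print(status.name, status.status)

-
- async def get(
-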
self,
- resource_group_name: str,
- factory_name: str,
- integration_runtime_name: str,
- metadata_path: Optional[str] = None,
- **kwargs
- ) -> "models.SsisObjectMetadataListResponse":
- """Get SSIS integration runtime object metadata for the specified path. The return value is a
- pageable metadata list.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param integration_runtime_name: The integration runtime name.
- :type integration_runtime_name: str
- :param metadata_path: Metadata path.
- :type metadata_path: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: SsisObjectMetadataListResponse, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.SsisObjectMetadataListResponse
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.SsisObjectMetadataListResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- get_metadata_request = models.GetSsisObjectMetadataRequest(metadata_path=metadata_path)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.get.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- if get_metadata_request is not None:
- body_content = self._serialize.body(get_metadata_request, 'GetSsisObjectMetadataRequest')
- else:
- body_content = None
- body_content_kwargs['content'] = body_content
- request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('SsisObjectMetadataListResponse', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
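A completed refresh is typically followed by this metadata query; a hedged sketch under the same assumptions (the value and name fields are assumptions about the vendored models):

    # Browse the SSIS catalog tree level by level after a refresh.
    listing = await client.integration_runtime_object_metadata.get(
        resource_group_name="exampleResourceGroup",
        factory_name="exampleFactoryName",
        integration_runtime_name="exampleIntegrationRuntime",
        metadata_path=None,  # None returns the top-level folders
    )
    for item in listing.value:
        print(item.name)

- get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/getObjectMetadata'} # type: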
ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_integration_runtime_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_integration_runtime_operations_async.py deleted file mode 100644 index 82b285c7a74..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_integration_runtime_operations_async.py +++ /dev/null @@ -1,1176 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union -import warnings - -from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest -from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod -from azure.mgmt.core.exceptions import ARMErrorFormat -from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling - -from ... import models - -T = TypeVar('T') -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] - -class IntegrationRuntimeOperations: - """IntegrationRuntimeOperations async operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = models - - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - def list_by_factory( - self, - resource_group_name: str, - factory_name: str, - **kwargs - ) -> AsyncIterable["models.IntegrationRuntimeListResponse"]: - """Lists integration runtimes. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. 
- :type factory_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either IntegrationRuntimeListResponse or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_factory_management_client.models.IntegrationRuntimeListResponse] - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - if not next_link: - # Construct URL - url = self.list_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) - return request - - async def extract_data(pipeline_response): - deserialized = self._deserialize('IntegrationRuntimeListResponse', pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) - return deserialized.next_link or None, AsyncList(list_of_elem) - - async def get_next(next_link=None): - request = prepare_request(next_link) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return AsyncItemPaged( - get_next, extract_data - ) - list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes'} # type: ignore - - async def create_or_update( - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - properties: "models.IntegrationRuntime", - if_match: Optional[str] = None, - **kwargs - ) -> "models.IntegrationRuntimeResource": - """Creates or updates an integration runtime. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. - :type integration_runtime_name: str - :param properties: Integration runtime properties. - :type properties: ~data_factory_management_client.models.IntegrationRuntime - :param if_match: ETag of the integration runtime entity. 
Should only be specified for update, - for which it should match existing entity or can be * for unconditional update. - :type if_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: IntegrationRuntimeResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.IntegrationRuntimeResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - integration_runtime = models.IntegrationRuntimeResource(properties=properties) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.create_or_update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(integration_runtime, 'IntegrationRuntimeResource') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('IntegrationRuntimeResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}'} # type: ignore - - async def get( - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - if_none_match: Optional[str] = None, - **kwargs - ) -> Optional["models.IntegrationRuntimeResource"]: - """Gets an integration runtime. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. 
- :type factory_name: str - :param integration_runtime_name: The integration runtime name. - :type integration_runtime_name: str - :param if_none_match: ETag of the integration runtime entity. Should only be specified for get. - If the ETag matches the existing entity tag, or if * was provided, then no content will be - returned. - :type if_none_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: IntegrationRuntimeResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.IntegrationRuntimeResource or None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.IntegrationRuntimeResource"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 304]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize('IntegrationRuntimeResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}'} # type: ignore - - async def update( - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - auto_update: Optional[Union[str, "models.IntegrationRuntimeAutoUpdate"]] = None, - update_delay_offset: Optional[str] = None, - **kwargs - ) -> "models.IntegrationRuntimeResource": - """Updates an integration runtime. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. 
- :type integration_runtime_name: str
- :param auto_update: Enables or disables the auto-update feature of the self-hosted integration
- runtime. See https://go.microsoft.com/fwlink/?linkid=854189.
- :type auto_update: str or ~data_factory_management_client.models.IntegrationRuntimeAutoUpdate
- :param update_delay_offset: The time offset (in hours) in the day, e.g., PT03H is 3 hours. The
- integration runtime auto-update will happen at that time.
- :type update_delay_offset: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: IntegrationRuntimeResource, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.IntegrationRuntimeResource
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeResource"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- update_integration_runtime_request = models.UpdateIntegrationRuntimeRequest(auto_update=auto_update, update_delay_offset=update_delay_offset)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.update.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(update_integration_runtime_request, 'UpdateIntegrationRuntimeRequest')
- body_content_kwargs['content'] = body_content
- request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('IntegrationRuntimeResource', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}'} # type: ignore
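This PATCH touches only the auto-update settings of a self-hosted runtime; a hedged sketch of calling it (the integration_runtimes attribute and the literal enum string values are assumptions about the vendored client and models):

    # Reuses 'client' from the earlier sketch.
    resource = await client.integration_runtimes.update(
        resource_group_name="exampleResourceGroup",
        factory_name="exampleFactoryName",
        integration_runtime_name="exampleIntegrationRuntime",
        auto_update="On",             # IntegrationRuntimeAutoUpdate: "On" or "Off"
        update_delay_offset="PT03H",  # auto-update runs 3 hours into the day
    )
    print(resource.name, resource.etag)

-
- async def delete(
- self,
- resource_group_name: str,
- factory_name: str,
- integration_runtime_name: str,
- **kwargs
- ) -> None:
- """Deletes an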
integration runtime. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. - :type integration_runtime_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.delete(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 204]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}'} # type: ignore - - async def get_status( - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - **kwargs - ) -> "models.IntegrationRuntimeStatusResponse": - """Gets detailed status information for an integration runtime. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. 
- :type integration_runtime_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: IntegrationRuntimeStatusResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.IntegrationRuntimeStatusResponse - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeStatusResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.get_status.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('IntegrationRuntimeStatusResponse', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get_status.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/getStatus'} # type: ignore - - async def get_connection_info( - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - **kwargs - ) -> "models.IntegrationRuntimeConnectionInfo": - """Gets the on-premises integration runtime connection information for encrypting the on-premises - data source credentials. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. 
- :type integration_runtime_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: IntegrationRuntimeConnectionInfo, or the result of cls(response) - :rtype: ~data_factory_management_client.models.IntegrationRuntimeConnectionInfo - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeConnectionInfo"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.get_connection_info.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('IntegrationRuntimeConnectionInfo', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get_connection_info.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/getConnectionInfo'} # type: ignore - - async def regenerate_auth_key( - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - key_name: Optional[Union[str, "models.IntegrationRuntimeAuthKeyName"]] = None, - **kwargs - ) -> "models.IntegrationRuntimeAuthKeys": - """Regenerates the authentication key for an integration runtime. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. - :type integration_runtime_name: str - :param key_name: The name of the authentication key to regenerate. 
- :type key_name: str or ~data_factory_management_client.models.IntegrationRuntimeAuthKeyName - :keyword callable cls: A custom type or function that will be passed the direct response - :return: IntegrationRuntimeAuthKeys, or the result of cls(response) - :rtype: ~data_factory_management_client.models.IntegrationRuntimeAuthKeys - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeAuthKeys"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - regenerate_key_parameters = models.IntegrationRuntimeRegenerateKeyParameters(key_name=key_name) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.regenerate_auth_key.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(regenerate_key_parameters, 'IntegrationRuntimeRegenerateKeyParameters') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('IntegrationRuntimeAuthKeys', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - regenerate_auth_key.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/regenerateAuthKey'} # type: ignore - - async def list_auth_key( - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - **kwargs - ) -> "models.IntegrationRuntimeAuthKeys": - """Retrieves the authentication keys for an integration runtime. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. 
- :type integration_runtime_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: IntegrationRuntimeAuthKeys, or the result of cls(response) - :rtype: ~data_factory_management_client.models.IntegrationRuntimeAuthKeys - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeAuthKeys"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.list_auth_key.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('IntegrationRuntimeAuthKeys', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - list_auth_key.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/listAuthKeys'} # type: ignore - - async def _start_initial( - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - **kwargs - ) -> Optional["models.IntegrationRuntimeStatusResponse"]: - cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.IntegrationRuntimeStatusResponse"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self._start_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, 
pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 202]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize('IntegrationRuntimeStatusResponse', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - _start_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/start'} # type: ignore - - async def begin_start( - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - **kwargs - ) -> AsyncLROPoller["models.IntegrationRuntimeStatusResponse"]: - """Starts a ManagedReserved type integration runtime. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. - :type integration_runtime_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
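For context, the polling keywords documented here are consumed by azure-core's AsyncLROPoller machinery. A minimal caller-side sketch follows — assuming an async DataFactoryManagementClient instance named client that exposes this operation group as integration_runtimes; the attribute name, resource names, and polling interval are illustrative, not taken from this diff:

async def start_integration_runtime(client):
    # begin_start only awaits the initial POST; the returned AsyncLROPoller
    # then drives the long-running operation until the runtime is started.
    poller = await client.integration_runtimes.begin_start(
        resource_group_name="exampleResourceGroup",      # illustrative
        factory_name="exampleFactoryName",               # illustrative
        integration_runtime_name="exampleIntegrationRuntime",
        polling_interval=30,  # seconds between polls when no Retry-After header
    )
    # result() waits for completion and returns the deserialized
    # IntegrationRuntimeStatusResponse described below.
    return await poller.result()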
- :return: An instance of AsyncLROPoller that returns either IntegrationRuntimeStatusResponse or the result of cls(response) - :rtype: ~azure.core.polling.AsyncLROPoller[~data_factory_management_client.models.IntegrationRuntimeStatusResponse] - :raises ~azure.core.exceptions.HttpResponseError: - """ - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeStatusResponse"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] - if cont_token is None: - raw_result = await self._start_initial( - resource_group_name=resource_group_name, - factory_name=factory_name, - integration_runtime_name=integration_runtime_name, - cls=lambda x,y,z: x, - **kwargs - ) - - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) - - def get_long_running_output(pipeline_response): - deserialized = self._deserialize('IntegrationRuntimeStatusResponse', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - return deserialized - - if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling - if cont_token: - return AsyncLROPoller.from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output - ) - else: - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_start.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/start'} # type: ignore - - async def _stop_initial( - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - **kwargs - ) -> None: - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self._stop_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 202]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - 
raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - _stop_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/stop'}  # type: ignore - - async def begin_stop( - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - **kwargs - ) -> AsyncLROPoller[None]: - """Stops a ManagedReserved type integration runtime. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. - :type integration_runtime_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. - :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) - :rtype: ~azure.core.polling.AsyncLROPoller[None] - :raises ~azure.core.exceptions.HttpResponseError: - """ - polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None)  # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str] - if cont_token is None: - raw_result = await self._stop_initial( - resource_group_name=resource_group_name, - factory_name=factory_name, - integration_runtime_name=integration_runtime_name, - cls=lambda x,y,z: x, - **kwargs - ) - - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) - - def get_long_running_output(pipeline_response): - if cls: - return cls(pipeline_response, None, {}) - - if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling - if cont_token: - return AsyncLROPoller.from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output - ) - else: - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_stop.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/stop'}  # type: ignore - - async def sync_credentials( - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - **kwargs - ) -> None: - """Force the integration runtime to synchronize credentials across integration runtime nodes, and - this will override the credentials across all worker nodes with those available on the - dispatcher node. If you already have the latest credential backup file, you should manually - import it (preferred) on any self-hosted integration runtime node rather than using this API directly. - - :param resource_group_name: The resource group name. 
- :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. - :type integration_runtime_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.sync_credentials.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - sync_credentials.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/syncCredentials'} # type: ignore - - async def get_monitoring_data( - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - **kwargs - ) -> "models.IntegrationRuntimeMonitoringData": - """Get the integration runtime monitoring data, which includes the monitor data for all the nodes - under this integration runtime. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. 
- :type integration_runtime_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: IntegrationRuntimeMonitoringData, or the result of cls(response) - :rtype: ~data_factory_management_client.models.IntegrationRuntimeMonitoringData - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None)  # type: ClsType["models.IntegrationRuntimeMonitoringData"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.get_monitoring_data.metadata['url']  # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {}  # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {}  # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('IntegrationRuntimeMonitoringData', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get_monitoring_data.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/monitoringData'}  # type: ignore - - async def upgrade( - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - **kwargs - ) -> None: - """Upgrade the self-hosted integration runtime to the latest version if an update is available. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. 
- :type integration_runtime_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None)  # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.upgrade.metadata['url']  # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {}  # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {}  # type: Dict[str, Any] - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - upgrade.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/upgrade'}  # type: ignore - - async def remove_link( - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - linked_factory_name: str, - **kwargs - ) -> None: - """Remove all linked integration runtimes under a specific data factory in a self-hosted integration - runtime. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. - :type integration_runtime_name: str - :param linked_factory_name: The data factory name for linked integration runtime. 
- :type linked_factory_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - linked_integration_runtime_request = models.LinkedIntegrationRuntimeRequest(linked_factory_name=linked_factory_name) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.remove_link.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(linked_integration_runtime_request, 'LinkedIntegrationRuntimeRequest') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - remove_link.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/removeLinks'} # type: ignore - - async def create_linked_integration_runtime( - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - name: Optional[str] = None, - subscription_id: Optional[str] = None, - data_factory_name: Optional[str] = None, - data_factory_location: Optional[str] = None, - **kwargs - ) -> "models.IntegrationRuntimeStatusResponse": - """Create a linked integration runtime entry in a shared integration runtime. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. - :type integration_runtime_name: str - :param name: The name of the linked integration runtime. - :type name: str - :param subscription_id: The ID of the subscription that the linked integration runtime belongs - to. 
- :type subscription_id: str - :param data_factory_name: The name of the data factory that the linked integration runtime - belongs to. - :type data_factory_name: str - :param data_factory_location: The location of the data factory that the linked integration - runtime belongs to. - :type data_factory_location: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: IntegrationRuntimeStatusResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.IntegrationRuntimeStatusResponse - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeStatusResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - create_linked_integration_runtime_request = models.CreateLinkedIntegrationRuntimeRequest(name=name, subscription_id=subscription_id, data_factory_name=data_factory_name, data_factory_location=data_factory_location) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.create_linked_integration_runtime.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(create_linked_integration_runtime_request, 'CreateLinkedIntegrationRuntimeRequest') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('IntegrationRuntimeStatusResponse', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - create_linked_integration_runtime.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/linkedIntegrationRuntime'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_linked_service_operations_async.py 
b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_linked_service_operations_async.py deleted file mode 100644 index 56e9e6f663a..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_linked_service_operations_async.py +++ /dev/null @@ -1,312 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar -import warnings - -from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest -from azure.mgmt.core.exceptions import ARMErrorFormat - -from ... import models - -T = TypeVar('T') -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] - -class LinkedServiceOperations: - """LinkedServiceOperations async operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = models - - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - def list_by_factory( - self, - resource_group_name: str, - factory_name: str, - **kwargs - ) -> AsyncIterable["models.LinkedServiceListResponse"]: - """Lists linked services. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. 
- :type factory_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either LinkedServiceListResponse or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_factory_management_client.models.LinkedServiceListResponse] - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.LinkedServiceListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - if not next_link: - # Construct URL - url = self.list_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) - return request - - async def extract_data(pipeline_response): - deserialized = self._deserialize('LinkedServiceListResponse', pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) - return deserialized.next_link or None, AsyncList(list_of_elem) - - async def get_next(next_link=None): - request = prepare_request(next_link) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return AsyncItemPaged( - get_next, extract_data - ) - list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/linkedservices'} # type: ignore - - async def create_or_update( - self, - resource_group_name: str, - factory_name: str, - linked_service_name: str, - properties: "models.LinkedService", - if_match: Optional[str] = None, - **kwargs - ) -> "models.LinkedServiceResource": - """Creates or updates a linked service. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param linked_service_name: The linked service name. - :type linked_service_name: str - :param properties: Properties of linked service. - :type properties: ~data_factory_management_client.models.LinkedService - :param if_match: ETag of the linkedService entity. 
Should only be specified for update, for - which it should match existing entity or can be * for unconditional update. - :type if_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: LinkedServiceResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.LinkedServiceResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.LinkedServiceResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - linked_service = models.LinkedServiceResource(properties=properties) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.create_or_update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'linkedServiceName': self._serialize.url("linked_service_name", linked_service_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(linked_service, 'LinkedServiceResource') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('LinkedServiceResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/linkedservices/{linkedServiceName}'} # type: ignore - - async def get( - self, - resource_group_name: str, - factory_name: str, - linked_service_name: str, - if_none_match: Optional[str] = None, - **kwargs - ) -> Optional["models.LinkedServiceResource"]: - """Gets a linked service. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param linked_service_name: The linked service name. 
- :type linked_service_name: str - :param if_none_match: ETag of the linked service entity. Should only be specified for get. If - the ETag matches the existing entity tag, or if * was provided, then no content will be - returned. - :type if_none_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: LinkedServiceResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.LinkedServiceResource or None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.LinkedServiceResource"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'linkedServiceName': self._serialize.url("linked_service_name", linked_service_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 304]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize('LinkedServiceResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/linkedservices/{linkedServiceName}'} # type: ignore - - async def delete( - self, - resource_group_name: str, - factory_name: str, - linked_service_name: str, - **kwargs - ) -> None: - """Deletes a linked service. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param linked_service_name: The linked service name. 
- :type linked_service_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'linkedServiceName': self._serialize.url("linked_service_name", linked_service_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.delete(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 204]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/linkedservices/{linkedServiceName}'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_managed_private_endpoint_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_managed_private_endpoint_operations_async.py deleted file mode 100644 index 3a899779963..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_managed_private_endpoint_operations_async.py +++ /dev/null @@ -1,336 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Generic, List, Optional, TypeVar -import warnings - -from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest -from azure.mgmt.core.exceptions import ARMErrorFormat - -from ... 
import models - -T = TypeVar('T') -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] - -class ManagedPrivateEndpointOperations: - """ManagedPrivateEndpointOperations async operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = models - - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - def list_by_factory( - self, - resource_group_name: str, - factory_name: str, - managed_virtual_network_name: str, - **kwargs - ) -> AsyncIterable["models.ManagedPrivateEndpointListResponse"]: - """Lists managed private endpoints. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param managed_virtual_network_name: Managed virtual network name. - :type managed_virtual_network_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either ManagedPrivateEndpointListResponse or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_factory_management_client.models.ManagedPrivateEndpointListResponse] - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedPrivateEndpointListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - if not next_link: - # Construct URL - url = self.list_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) - return request - - async def extract_data(pipeline_response): - deserialized = 
self._deserialize('ManagedPrivateEndpointListResponse', pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) - return deserialized.next_link or None, AsyncList(list_of_elem) - - async def get_next(next_link=None): - request = prepare_request(next_link) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return AsyncItemPaged( - get_next, extract_data - ) - list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints'} # type: ignore - - async def create_or_update( - self, - resource_group_name: str, - factory_name: str, - managed_virtual_network_name: str, - managed_private_endpoint_name: str, - if_match: Optional[str] = None, - connection_state: Optional["models.ConnectionStateProperties"] = None, - fqdns: Optional[List[str]] = None, - group_id: Optional[str] = None, - private_link_resource_id: Optional[str] = None, - **kwargs - ) -> "models.ManagedPrivateEndpointResource": - """Creates or updates a managed private endpoint. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param managed_virtual_network_name: Managed virtual network name. - :type managed_virtual_network_name: str - :param managed_private_endpoint_name: Managed private endpoint name. - :type managed_private_endpoint_name: str - :param if_match: ETag of the managed private endpoint entity. Should only be specified for - update, for which it should match existing entity or can be * for unconditional update. - :type if_match: str - :param connection_state: The managed private endpoint connection state. - :type connection_state: ~data_factory_management_client.models.ConnectionStateProperties - :param fqdns: Fully qualified domain names. - :type fqdns: list[str] - :param group_id: The groupId to which the managed private endpoint is created. - :type group_id: str - :param private_link_resource_id: The ARM resource ID of the resource to which the managed - private endpoint is created. 
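The deleted `create_or_update` flattens the request body: callers pass `connection_state`, `fqdns`, `group_id`, and `private_link_resource_id` as keyword arguments, and the method assembles the `ManagedPrivateEndpointResource` itself. A hypothetical invocation (all names and the ARM ID are placeholders):

    endpoint = await client.managed_private_endpoints.create_or_update(
        "exampleResourceGroup",
        "exampleFactoryName",
        "exampleManagedVirtualNetworkName",
        "exampleManagedPrivateEndpointName",
        group_id="blob",                       # target sub-resource
        private_link_resource_id=storage_id,   # placeholder ARM resource ID
        fqdns=["examplestorage.blob.core.windows.net"],
    )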
- :type private_link_resource_id: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: ManagedPrivateEndpointResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.ManagedPrivateEndpointResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedPrivateEndpointResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - managed_private_endpoint = models.ManagedPrivateEndpointResource(connection_state=connection_state, fqdns=fqdns, group_id=group_id, private_link_resource_id=private_link_resource_id) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.create_or_update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), - 'managedPrivateEndpointName': self._serialize.url("managed_private_endpoint_name", managed_private_endpoint_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(managed_private_endpoint, 'ManagedPrivateEndpointResource') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('ManagedPrivateEndpointResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints/{managedPrivateEndpointName}'} # type: ignore - - async def get( - self, - resource_group_name: str, - factory_name: str, - 
managed_virtual_network_name: str, - managed_private_endpoint_name: str, - if_none_match: Optional[str] = None, - **kwargs - ) -> "models.ManagedPrivateEndpointResource": - """Gets a managed private endpoint. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param managed_virtual_network_name: Managed virtual network name. - :type managed_virtual_network_name: str - :param managed_private_endpoint_name: Managed private endpoint name. - :type managed_private_endpoint_name: str - :param if_none_match: ETag of the managed private endpoint entity. Should only be specified for - get. If the ETag matches the existing entity tag, or if * was provided, then no content will be - returned. - :type if_none_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: ManagedPrivateEndpointResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.ManagedPrivateEndpointResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedPrivateEndpointResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), - 'managedPrivateEndpointName': self._serialize.url("managed_private_endpoint_name", managed_private_endpoint_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('ManagedPrivateEndpointResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints/{managedPrivateEndpointName}'} # type: ignore - - async def delete( - self, - resource_group_name: str, - factory_name: str, - managed_virtual_network_name: str, - managed_private_endpoint_name: str, - **kwargs - ) -> None: - """Deletes a managed private endpoint. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param managed_virtual_network_name: Managed virtual network name. - :type managed_virtual_network_name: str - :param managed_private_endpoint_name: Managed private endpoint name. - :type managed_private_endpoint_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), - 'managedPrivateEndpointName': self._serialize.url("managed_private_endpoint_name", managed_private_endpoint_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.delete(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 204]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints/{managedPrivateEndpointName}'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_managed_virtual_network_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_managed_virtual_network_operations_async.py deleted file 
mode 100644 index 2152988d7ef..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_managed_virtual_network_operations_async.py +++ /dev/null @@ -1,255 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar -import warnings - -from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest -from azure.mgmt.core.exceptions import ARMErrorFormat - -from ... import models - -T = TypeVar('T') -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] - -class ManagedVirtualNetworkOperations: - """ManagedVirtualNetworkOperations async operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = models - - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - def list_by_factory( - self, - resource_group_name: str, - factory_name: str, - **kwargs - ) -> AsyncIterable["models.ManagedVirtualNetworkListResponse"]: - """Lists managed Virtual Networks. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. 
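Note that `list_by_factory` is not awaited directly: it returns an `AsyncItemPaged` whose `get_next`/`extract_data` closures only issue requests while the result is iterated, following `next_link` until the service stops returning one. A usage sketch (placeholder names):

    async def print_managed_vnets(client, rg, factory):
        pager = client.managed_virtual_networks.list_by_factory(rg, factory)
        async for vnet in pager:   # each iteration may trigger a page fetch
            print(vnet.name)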
- :type factory_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either ManagedVirtualNetworkListResponse or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_factory_management_client.models.ManagedVirtualNetworkListResponse] - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedVirtualNetworkListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - if not next_link: - # Construct URL - url = self.list_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) - return request - - async def extract_data(pipeline_response): - deserialized = self._deserialize('ManagedVirtualNetworkListResponse', pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) - return deserialized.next_link or None, AsyncList(list_of_elem) - - async def get_next(next_link=None): - request = prepare_request(next_link) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return AsyncItemPaged( - get_next, extract_data - ) - list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks'} # type: ignore - - async def create_or_update( - self, - resource_group_name: str, - factory_name: str, - managed_virtual_network_name: str, - properties: "models.ManagedVirtualNetwork", - if_match: Optional[str] = None, - **kwargs - ) -> "models.ManagedVirtualNetworkResource": - """Creates or updates a managed Virtual Network. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param managed_virtual_network_name: Managed virtual network name. - :type managed_virtual_network_name: str - :param properties: Managed Virtual Network properties. 
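Unlike the private-endpoint operations, this method takes a whole `ManagedVirtualNetwork` model as `properties` and wraps it in a `ManagedVirtualNetworkResource` itself. A hypothetical call, assuming the vendored package exposes `models` at its root as the `from ... import models` statements above suggest:

    from azext_datafactory.vendored_sdks.datafactory import models

    vnet = await client.managed_virtual_networks.create_or_update(
        "exampleResourceGroup", "exampleFactoryName",
        "exampleManagedVirtualNetworkName",
        properties=models.ManagedVirtualNetwork(),  # no required fields
    )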
- :type properties: ~data_factory_management_client.models.ManagedVirtualNetwork - :param if_match: ETag of the managed Virtual Network entity. Should only be specified for - update, for which it should match existing entity or can be * for unconditional update. - :type if_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: ManagedVirtualNetworkResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.ManagedVirtualNetworkResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedVirtualNetworkResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - managed_virtual_network = models.ManagedVirtualNetworkResource(properties=properties) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.create_or_update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(managed_virtual_network, 'ManagedVirtualNetworkResource') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('ManagedVirtualNetworkResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}'} # type: ignore - - async def get( - self, - resource_group_name: str, - factory_name: str, - managed_virtual_network_name: str, - if_none_match: Optional[str] = None, - **kwargs - ) -> "models.ManagedVirtualNetworkResource": - """Gets a managed 
Virtual Network. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param managed_virtual_network_name: Managed virtual network name. - :type managed_virtual_network_name: str - :param if_none_match: ETag of the managed Virtual Network entity. Should only be specified for - get. If the ETag matches the existing entity tag, or if * was provided, then no content will be - returned. - :type if_none_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: ManagedVirtualNetworkResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.ManagedVirtualNetworkResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedVirtualNetworkResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('ManagedVirtualNetworkResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_operation_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_operation_operations_async.py deleted file mode 100644 index 83206d77039..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_operation_operations_async.py +++ /dev/null @@ -1,101 +0,0 @@ -# coding=utf-8 -# 
-------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar -import warnings - -from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest -from azure.mgmt.core.exceptions import ARMErrorFormat - -from ... import models - -T = TypeVar('T') -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] - -class OperationOperations: - """OperationOperations async operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = models - - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - def list( - self, - **kwargs - ) -> AsyncIterable["models.OperationListResponse"]: - """Lists the available Azure Data Factory API operations. 
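Every list operation in these deleted modules shares the same shape: a `prepare_request` that builds the first URL or replays `next_link`, plus `extract_data`/`get_next` coroutines handed to `AsyncItemPaged`. The protocol can be exercised in isolation, no service required (runnable sketch with fake pages):

    import asyncio
    from azure.core.async_paging import AsyncItemPaged, AsyncList

    # Two fake "service pages"; the second carries no continuation token.
    PAGES = {None: (["op1", "op2"], "page-2"), "page-2": (["op3"], None)}

    async def get_next(continuation_token=None):
        return PAGES[continuation_token]   # stands in for an HTTP round trip

    async def extract_data(response):
        items, next_token = response
        return next_token, AsyncList(items)

    async def main():
        async for op in AsyncItemPaged(get_next, extract_data):
            print(op)                      # op1, op2, op3

    asyncio.run(main())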
- - :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either OperationListResponse or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_factory_management_client.models.OperationListResponse] - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.OperationListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - if not next_link: - # Construct URL - url = self.list.metadata['url'] # type: ignore - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) - return request - - async def extract_data(pipeline_response): - deserialized = self._deserialize('OperationListResponse', pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) - return deserialized.next_link or None, AsyncList(list_of_elem) - - async def get_next(next_link=None): - request = prepare_request(next_link) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return AsyncItemPaged( - get_next, extract_data - ) - list.metadata = {'url': '/providers/Microsoft.DataFactory/operations'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_pipeline_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_pipeline_operations_async.py deleted file mode 100644 index 34c7453f951..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_pipeline_operations_async.py +++ /dev/null @@ -1,405 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar -import warnings - -from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest -from azure.mgmt.core.exceptions import ARMErrorFormat - -from ... 
import models - -T = TypeVar('T') -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] - -class PipelineOperations: - """PipelineOperations async operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = models - - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - def list_by_factory( - self, - resource_group_name: str, - factory_name: str, - **kwargs - ) -> AsyncIterable["models.PipelineListResponse"]: - """Lists pipelines. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either PipelineListResponse or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_factory_management_client.models.PipelineListResponse] - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - if not next_link: - # Construct URL - url = self.list_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) - return request - - async def extract_data(pipeline_response): - deserialized = self._deserialize('PipelineListResponse', pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) - return deserialized.next_link or None, AsyncList(list_of_elem) - - async def get_next(next_link=None): - request = prepare_request(next_link) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - 
map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return AsyncItemPaged( - get_next, extract_data - ) - list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines'} # type: ignore - - async def create_or_update( - self, - resource_group_name: str, - factory_name: str, - pipeline_name: str, - pipeline: "models.PipelineResource", - if_match: Optional[str] = None, - **kwargs - ) -> "models.PipelineResource": - """Creates or updates a pipeline. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param pipeline_name: The pipeline name. - :type pipeline_name: str - :param pipeline: Pipeline resource definition. - :type pipeline: ~data_factory_management_client.models.PipelineResource - :param if_match: ETag of the pipeline entity. Should only be specified for update, for which - it should match existing entity or can be * for unconditional update. - :type if_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: PipelineResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.PipelineResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.create_or_update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'pipelineName': self._serialize.url("pipeline_name", pipeline_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(pipeline, 'PipelineResource') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, 
error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('PipelineResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}'} # type: ignore - - async def get( - self, - resource_group_name: str, - factory_name: str, - pipeline_name: str, - if_none_match: Optional[str] = None, - **kwargs - ) -> Optional["models.PipelineResource"]: - """Gets a pipeline. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param pipeline_name: The pipeline name. - :type pipeline_name: str - :param if_none_match: ETag of the pipeline entity. Should only be specified for get. If the - ETag matches the existing entity tag, or if * was provided, then no content will be returned. - :type if_none_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: PipelineResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.PipelineResource or None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.PipelineResource"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'pipelineName': self._serialize.url("pipeline_name", pipeline_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 304]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize('PipelineResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}'} # type: ignore - - async def delete( - self, - resource_group_name: str, - factory_name: str, - pipeline_name: str, - **kwargs - ) -> None: - """Deletes a pipeline. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param pipeline_name: The pipeline name. - :type pipeline_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'pipelineName': self._serialize.url("pipeline_name", pipeline_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.delete(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 204]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}'} # type: ignore - - async def create_run( - self, - resource_group_name: str, - factory_name: str, - pipeline_name: str, - reference_pipeline_run_id: Optional[str] = None, - is_recovery: Optional[bool] = None, - start_activity_name: Optional[str] = None, - start_from_failure: Optional[bool] = None, - parameters: Optional[Dict[str, object]] = None, - **kwargs - ) -> "models.CreateRunResponse": - """Creates a run of a pipeline. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param pipeline_name: The pipeline name. - :type pipeline_name: str - :param reference_pipeline_run_id: The pipeline run identifier. If run ID is specified the - parameters of the specified run will be used to create a new run. - :type reference_pipeline_run_id: str - :param is_recovery: Recovery mode flag. 
If recovery mode is set to true, the specified - referenced pipeline run and the new run will be grouped under the same groupId. - :type is_recovery: bool - :param start_activity_name: In recovery mode, the rerun will start from this activity. If not - specified, all activities will run. - :type start_activity_name: str - :param start_from_failure: In recovery mode, if set to true, the rerun will start from failed - activities. The property will be used only if startActivityName is not specified. - :type start_from_failure: bool - :param parameters: Parameters of the pipeline run. These parameters will be used only if the - runId is not specified. - :type parameters: dict[str, object] - :keyword callable cls: A custom type or function that will be passed the direct response - :return: CreateRunResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.CreateRunResponse - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.CreateRunResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.create_run.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'pipelineName': self._serialize.url("pipeline_name", pipeline_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - if reference_pipeline_run_id is not None: - query_parameters['referencePipelineRunId'] = self._serialize.query("reference_pipeline_run_id", reference_pipeline_run_id, 'str') - if is_recovery is not None: - query_parameters['isRecovery'] = self._serialize.query("is_recovery", is_recovery, 'bool') - if start_activity_name is not None: - query_parameters['startActivityName'] = self._serialize.query("start_activity_name", start_activity_name, 'str') - if start_from_failure is not None: - query_parameters['startFromFailure'] = self._serialize.query("start_from_failure", start_from_failure, 'bool') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - if parameters is not None: - body_content = self._serialize.body(parameters, '{object}') - else: - body_content = None - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, 
error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('CreateRunResponse', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - create_run.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}/createRun'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_pipeline_run_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_pipeline_run_operations_async.py deleted file mode 100644 index 5cdfd09fe01..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_pipeline_run_operations_async.py +++ /dev/null @@ -1,243 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -import datetime -from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar -import warnings - -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest -from azure.mgmt.core.exceptions import ARMErrorFormat - -from ... import models - -T = TypeVar('T') -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] - -class PipelineRunOperations: - """PipelineRunOperations async operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = models - - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - async def query_by_factory( - self, - resource_group_name: str, - factory_name: str, - last_updated_after: datetime.datetime, - last_updated_before: datetime.datetime, - continuation_token_parameter: Optional[str] = None, - filters: Optional[List["models.RunQueryFilter"]] = None, - order_by: Optional[List["models.RunQueryOrderBy"]] = None, - **kwargs - ) -> "models.PipelineRunsQueryResponse": - """Query pipeline runs in the factory based on input filter conditions. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param last_updated_after: The time at or after which the run event was updated in 'ISO 8601' - format. 
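`query_by_factory` likewise flattens its body: the time window, continuation token, filters, and ordering arrive as keyword arguments and are packed into a `RunFilterParameters` internally. A hypothetical query for the last 24 hours of runs of one pipeline (`models` imported as in the earlier sketch):

    import datetime

    now = datetime.datetime.now(datetime.timezone.utc)
    result = await client.pipeline_runs.query_by_factory(
        "exampleResourceGroup", "exampleFactoryName",
        last_updated_after=now - datetime.timedelta(days=1),
        last_updated_before=now,
        filters=[models.RunQueryFilter(operand="PipelineName",
                                       operator="Equals",
                                       values=["examplePipeline"])],
    )
    for run in result.value:
        print(run.run_id, run.status)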
- :type last_updated_after: ~datetime.datetime - :param last_updated_before: The time at or before which the run event was updated in 'ISO 8601' - format. - :type last_updated_before: ~datetime.datetime - :param continuation_token_parameter: The continuation token for getting the next page of - results. Null for first page. - :type continuation_token_parameter: str - :param filters: List of filters. - :type filters: list[~data_factory_management_client.models.RunQueryFilter] - :param order_by: List of OrderBy option. - :type order_by: list[~data_factory_management_client.models.RunQueryOrderBy] - :keyword callable cls: A custom type or function that will be passed the direct response - :return: PipelineRunsQueryResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.PipelineRunsQueryResponse - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineRunsQueryResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - filter_parameters = models.RunFilterParameters(continuation_token=continuation_token_parameter, last_updated_after=last_updated_after, last_updated_before=last_updated_before, filters=filters, order_by=order_by) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.query_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(filter_parameters, 'RunFilterParameters') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('PipelineRunsQueryResponse', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - query_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/queryPipelineRuns'} # type: ignore - - async def get( - self, - resource_group_name: str, - factory_name: str, - run_id: str, - **kwargs - ) -> "models.PipelineRun": - """Get a pipeline run by its run ID. 
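The run ID accepted here is the one handed back by `PipelineOperations.create_run` (seen earlier in this diff as a `CreateRunResponse`), which makes the create/poll pairing straightforward. A sketch with placeholder names, assuming `pipelines` and `pipeline_runs` as the operation-group attributes:

    run = await client.pipelines.create_run(
        "exampleResourceGroup", "exampleFactoryName", "examplePipeline",
        parameters={"inputPath": "container/in"},   # placeholder parameter
    )
    status = await client.pipeline_runs.get(
        "exampleResourceGroup", "exampleFactoryName", run.run_id)
    print(status.status)                            # e.g. "InProgress"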
- - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param run_id: The pipeline run identifier. - :type run_id: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: PipelineRun, or the result of cls(response) - :rtype: ~data_factory_management_client.models.PipelineRun - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineRun"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'runId': self._serialize.url("run_id", run_id, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('PipelineRun', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelineruns/{runId}'} # type: ignore - - async def cancel( - self, - resource_group_name: str, - factory_name: str, - run_id: str, - is_recursive: Optional[bool] = None, - **kwargs - ) -> None: - """Cancel a pipeline run by its run ID. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param run_id: The pipeline run identifier. - :type run_id: str - :param is_recursive: If true, cancel all the Child pipelines that are triggered by the current - pipeline. 
- :type is_recursive: bool - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.cancel.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'runId': self._serialize.url("run_id", run_id, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - if is_recursive is not None: - query_parameters['isRecursive'] = self._serialize.query("is_recursive", is_recursive, 'bool') - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - cancel.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelineruns/{runId}/cancel'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_trigger_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_trigger_operations_async.py deleted file mode 100644 index f4669b45bc2..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_trigger_operations_async.py +++ /dev/null @@ -1,877 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
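Note: the async pipeline-run operations deleted above (query_by_factory, get, cancel) are the vendored copy of a surface the public azure-mgmt-datafactory package still exposes. A minimal sketch of the equivalent calls through that public async client, not taken from this diff; the subscription, resource group, and factory names are placeholders:

    import asyncio
    import datetime

    from azure.identity.aio import DefaultAzureCredential
    from azure.mgmt.datafactory.aio import DataFactoryManagementClient
    from azure.mgmt.datafactory.models import RunFilterParameters


    async def main() -> None:
        credential = DefaultAzureCredential()
        client = DataFactoryManagementClient(credential, "<subscription-id>")
        async with credential, client:
            now = datetime.datetime.now(datetime.timezone.utc)
            # RunFilterParameters carries the same fields the vendored method
            # flattens into last_updated_after/last_updated_before arguments.
            result = await client.pipeline_runs.query_by_factory(
                "exampleResourceGroup",
                "exampleFactoryName",
                RunFilterParameters(
                    last_updated_after=now - datetime.timedelta(days=1),
                    last_updated_before=now,
                ),
            )
            for run in result.value:
                print(run.run_id, run.status)


    asyncio.run(main())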
-# -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union -import warnings - -from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest -from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod -from azure.mgmt.core.exceptions import ARMErrorFormat -from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling - -from ... import models - -T = TypeVar('T') -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] - -class TriggerOperations: - """TriggerOperations async operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = models - - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - def list_by_factory( - self, - resource_group_name: str, - factory_name: str, - **kwargs - ) -> AsyncIterable["models.TriggerListResponse"]: - """Lists triggers. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. 
- :type factory_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either TriggerListResponse or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_factory_management_client.models.TriggerListResponse] - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - if not next_link: - # Construct URL - url = self.list_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) - return request - - async def extract_data(pipeline_response): - deserialized = self._deserialize('TriggerListResponse', pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) - return deserialized.next_link or None, AsyncList(list_of_elem) - - async def get_next(next_link=None): - request = prepare_request(next_link) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return AsyncItemPaged( - get_next, extract_data - ) - list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers'} # type: ignore - - async def query_by_factory( - self, - resource_group_name: str, - factory_name: str, - continuation_token_parameter: Optional[str] = None, - parent_trigger_name: Optional[str] = None, - **kwargs - ) -> "models.TriggerQueryResponse": - """Query triggers. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param continuation_token_parameter: The continuation token for getting the next page of - results. Null for first page. - :type continuation_token_parameter: str - :param parent_trigger_name: The name of the parent TumblingWindowTrigger to get the child rerun - triggers. 
- :type parent_trigger_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: TriggerQueryResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.TriggerQueryResponse - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerQueryResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - filter_parameters = models.TriggerFilterParameters(continuation_token=continuation_token_parameter, parent_trigger_name=parent_trigger_name) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.query_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(filter_parameters, 'TriggerFilterParameters') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('TriggerQueryResponse', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - query_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/querytriggers'} # type: ignore - - async def create_or_update( - self, - resource_group_name: str, - factory_name: str, - trigger_name: str, - properties: "models.Trigger", - if_match: Optional[str] = None, - **kwargs - ) -> "models.TriggerResource": - """Creates or updates a trigger. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param trigger_name: The trigger name. - :type trigger_name: str - :param properties: Properties of the trigger. - :type properties: ~data_factory_management_client.models.Trigger - :param if_match: ETag of the trigger entity. Should only be specified for update, for which it - should match existing entity or can be * for unconditional update. 
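Note: the if_match plumbing above is what enables optimistic concurrency on triggers. A hedged sketch of a conditional update against the public async client; the client is assumed to be constructed as in the earlier sketch, and the resource names are placeholders:

    async def update_trigger_guarded(client) -> None:
        # Read the trigger first so we hold its current ETag.
        current = await client.triggers.get(
            "exampleResourceGroup", "exampleFactoryName", "exampleTrigger"
        )
        # Re-submit with If-Match: the service rejects the write (412) if the
        # trigger changed since we read it; pass "*" to update unconditionally.
        await client.triggers.create_or_update(
            "exampleResourceGroup",
            "exampleFactoryName",
            "exampleTrigger",
            current,
            if_match=current.etag,
        )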
- :type if_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: TriggerResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.TriggerResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - trigger = models.TriggerResource(properties=properties) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.create_or_update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(trigger, 'TriggerResource') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('TriggerResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}'} # type: ignore - - async def get( - self, - resource_group_name: str, - factory_name: str, - trigger_name: str, - if_none_match: Optional[str] = None, - **kwargs - ) -> Optional["models.TriggerResource"]: - """Gets a trigger. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param trigger_name: The trigger name. - :type trigger_name: str - :param if_none_match: ETag of the trigger entity. Should only be specified for get. If the ETag - matches the existing entity tag, or if * was provided, then no content will be returned. 
- :type if_none_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: TriggerResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.TriggerResource or None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.TriggerResource"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 304]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize('TriggerResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}'} # type: ignore - - async def delete( - self, - resource_group_name: str, - factory_name: str, - trigger_name: str, - **kwargs - ) -> None: - """Deletes a trigger. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param trigger_name: The trigger name. 
- :type trigger_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.delete(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 204]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}'} # type: ignore - - async def _subscribe_to_event_initial( - self, - resource_group_name: str, - factory_name: str, - trigger_name: str, - **kwargs - ) -> Optional["models.TriggerSubscriptionOperationStatus"]: - cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.TriggerSubscriptionOperationStatus"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self._subscribe_to_event_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - 
request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 202]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize('TriggerSubscriptionOperationStatus', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - _subscribe_to_event_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/subscribeToEvents'} # type: ignore - - async def begin_subscribe_to_event( - self, - resource_group_name: str, - factory_name: str, - trigger_name: str, - **kwargs - ) -> AsyncLROPoller["models.TriggerSubscriptionOperationStatus"]: - """Subscribe event trigger to events. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param trigger_name: The trigger name. - :type trigger_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
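Note: begin_subscribe_to_event is a long-running operation; the initial POST may return 202, and the returned poller drives the request to a terminal TriggerSubscriptionOperationStatus. A short usage sketch with the same assumed client and placeholder names:

    async def subscribe_and_wait(client) -> None:
        poller = await client.triggers.begin_subscribe_to_event(
            "exampleResourceGroup", "exampleFactoryName", "exampleTrigger"
        )
        # result() polls until the LRO completes and returns the deserialized body.
        status = await poller.result()
        print(status.trigger_name, status.status)  # e.g. "Enabled"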
- :return: An instance of AsyncLROPoller that returns either TriggerSubscriptionOperationStatus or the result of cls(response) - :rtype: ~azure.core.polling.AsyncLROPoller[~data_factory_management_client.models.TriggerSubscriptionOperationStatus] - :raises ~azure.core.exceptions.HttpResponseError: - """ - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerSubscriptionOperationStatus"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] - if cont_token is None: - raw_result = await self._subscribe_to_event_initial( - resource_group_name=resource_group_name, - factory_name=factory_name, - trigger_name=trigger_name, - cls=lambda x,y,z: x, - **kwargs - ) - - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) - - def get_long_running_output(pipeline_response): - deserialized = self._deserialize('TriggerSubscriptionOperationStatus', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - return deserialized - - if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling - if cont_token: - return AsyncLROPoller.from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output - ) - else: - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_subscribe_to_event.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/subscribeToEvents'} # type: ignore - - async def get_event_subscription_status( - self, - resource_group_name: str, - factory_name: str, - trigger_name: str, - **kwargs - ) -> "models.TriggerSubscriptionOperationStatus": - """Get a trigger's event subscription status. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param trigger_name: The trigger name. 
- :type trigger_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: TriggerSubscriptionOperationStatus, or the result of cls(response) - :rtype: ~data_factory_management_client.models.TriggerSubscriptionOperationStatus - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerSubscriptionOperationStatus"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.get_event_subscription_status.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('TriggerSubscriptionOperationStatus', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get_event_subscription_status.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/getEventSubscriptionStatus'} # type: ignore - - async def _unsubscribe_from_event_initial( - self, - resource_group_name: str, - factory_name: str, - trigger_name: str, - **kwargs - ) -> Optional["models.TriggerSubscriptionOperationStatus"]: - cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.TriggerSubscriptionOperationStatus"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self._unsubscribe_from_event_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, 
pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 202]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize('TriggerSubscriptionOperationStatus', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - _unsubscribe_from_event_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/unsubscribeFromEvents'} # type: ignore - - async def begin_unsubscribe_from_event( - self, - resource_group_name: str, - factory_name: str, - trigger_name: str, - **kwargs - ) -> AsyncLROPoller["models.TriggerSubscriptionOperationStatus"]: - """Unsubscribe event trigger from events. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param trigger_name: The trigger name. - :type trigger_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
- :return: An instance of AsyncLROPoller that returns either TriggerSubscriptionOperationStatus or the result of cls(response) - :rtype: ~azure.core.polling.AsyncLROPoller[~data_factory_management_client.models.TriggerSubscriptionOperationStatus] - :raises ~azure.core.exceptions.HttpResponseError: - """ - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerSubscriptionOperationStatus"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] - if cont_token is None: - raw_result = await self._unsubscribe_from_event_initial( - resource_group_name=resource_group_name, - factory_name=factory_name, - trigger_name=trigger_name, - cls=lambda x,y,z: x, - **kwargs - ) - - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) - - def get_long_running_output(pipeline_response): - deserialized = self._deserialize('TriggerSubscriptionOperationStatus', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - return deserialized - - if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling - if cont_token: - return AsyncLROPoller.from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output - ) - else: - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_unsubscribe_from_event.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/unsubscribeFromEvents'} # type: ignore - - async def _start_initial( - self, - resource_group_name: str, - factory_name: str, - trigger_name: str, - **kwargs - ) -> None: - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self._start_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise 
HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - _start_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/start'} # type: ignore - - async def begin_start( - self, - resource_group_name: str, - factory_name: str, - trigger_name: str, - **kwargs - ) -> AsyncLROPoller[None]: - """Starts a trigger. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param trigger_name: The trigger name. - :type trigger_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. - :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) - :rtype: ~azure.core.polling.AsyncLROPoller[None] - :raises ~azure.core.exceptions.HttpResponseError: - """ - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] - if cont_token is None: - raw_result = await self._start_initial( - resource_group_name=resource_group_name, - factory_name=factory_name, - trigger_name=trigger_name, - cls=lambda x,y,z: x, - **kwargs - ) - - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) - - def get_long_running_output(pipeline_response): - if cls: - return cls(pipeline_response, None, {}) - - if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling - if cont_token: - return AsyncLROPoller.from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output - ) - else: - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_start.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/start'} # type: ignore - - async def _stop_initial( - self, - resource_group_name: str, - factory_name: str, - trigger_name: str, - **kwargs - ) -> None: - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self._stop_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', 
max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - _stop_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/stop'} # type: ignore - - async def begin_stop( - self, - resource_group_name: str, - factory_name: str, - trigger_name: str, - **kwargs - ) -> AsyncLROPoller[None]: - """Stops a trigger. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param trigger_name: The trigger name. - :type trigger_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
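Note: begin_start and begin_stop follow the same poller pattern but yield AsyncLROPoller[None]. A common maintenance sequence, sketched with the same assumed client and placeholder names:

    async def restart_trigger(client) -> None:
        stop = await client.triggers.begin_stop(
            "exampleResourceGroup", "exampleFactoryName", "exampleTrigger"
        )
        await stop.result()  # returns None; we only wait for completion
        start = await client.triggers.begin_start(
            "exampleResourceGroup", "exampleFactoryName", "exampleTrigger"
        )
        await start.result()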
- :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) - :rtype: ~azure.core.polling.AsyncLROPoller[None] - :raises ~azure.core.exceptions.HttpResponseError: - """ - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] - if cont_token is None: - raw_result = await self._stop_initial( - resource_group_name=resource_group_name, - factory_name=factory_name, - trigger_name=trigger_name, - cls=lambda x,y,z: x, - **kwargs - ) - - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) - - def get_long_running_output(pipeline_response): - if cls: - return cls(pipeline_response, None, {}) - - if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling - if cont_token: - return AsyncLROPoller.from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output - ) - else: - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_stop.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/stop'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_trigger_run_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_trigger_run_operations_async.py deleted file mode 100644 index 3401f9c95c1..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_trigger_run_operations_async.py +++ /dev/null @@ -1,241 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -import datetime -from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar -import warnings - -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest -from azure.mgmt.core.exceptions import ARMErrorFormat - -from ... import models - -T = TypeVar('T') -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] - -class TriggerRunOperations: - """TriggerRunOperations async operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. 
- :param deserializer: An object model deserializer. - """ - - models = models - - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - async def rerun( - self, - resource_group_name: str, - factory_name: str, - trigger_name: str, - run_id: str, - **kwargs - ) -> None: - """Rerun single trigger instance by runId. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param trigger_name: The trigger name. - :type trigger_name: str - :param run_id: The pipeline run identifier. - :type run_id: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.rerun.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - 'runId': self._serialize.url("run_id", run_id, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - rerun.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/triggerRuns/{runId}/rerun'} # type: ignore - - async def cancel( - self, - resource_group_name: str, - factory_name: str, - trigger_name: str, - run_id: str, - **kwargs - ) -> None: - """Cancel a single trigger instance by runId. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param trigger_name: The trigger name. - :type trigger_name: str - :param run_id: The pipeline run identifier. 
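Note: rerun and cancel on trigger runs are plain POSTs keyed by trigger name plus run ID. A sketch with the same assumed client; the run ID would normally come from a trigger-run query such as the one shown after this operation:

    async def rerun_trigger_run(client, run_id: str) -> None:
        await client.trigger_runs.rerun(
            "exampleResourceGroup", "exampleFactoryName", "exampleTrigger", run_id
        )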
- :type run_id: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.cancel.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - 'runId': self._serialize.url("run_id", run_id, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - cancel.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/triggerRuns/{runId}/cancel'} # type: ignore - - async def query_by_factory( - self, - resource_group_name: str, - factory_name: str, - last_updated_after: datetime.datetime, - last_updated_before: datetime.datetime, - continuation_token_parameter: Optional[str] = None, - filters: Optional[List["models.RunQueryFilter"]] = None, - order_by: Optional[List["models.RunQueryOrderBy"]] = None, - **kwargs - ) -> "models.TriggerRunsQueryResponse": - """Query trigger runs. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param last_updated_after: The time at or after which the run event was updated in 'ISO 8601' - format. - :type last_updated_after: ~datetime.datetime - :param last_updated_before: The time at or before which the run event was updated in 'ISO 8601' - format. - :type last_updated_before: ~datetime.datetime - :param continuation_token_parameter: The continuation token for getting the next page of - results. Null for first page. - :type continuation_token_parameter: str - :param filters: List of filters. - :type filters: list[~data_factory_management_client.models.RunQueryFilter] - :param order_by: List of OrderBy option. 
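Note: trigger-run queries take the same RunFilterParameters shape as pipeline-run queries, with filters and order_by narrowing the window. A hedged example that lists the last day's runs of one trigger; the enum member spellings follow the public models and the names are placeholders:

    import datetime

    from azure.mgmt.datafactory.models import (
        RunFilterParameters,
        RunQueryFilter,
        RunQueryFilterOperand,
        RunQueryFilterOperator,
    )


    async def recent_trigger_runs(client) -> None:
        now = datetime.datetime.now(datetime.timezone.utc)
        result = await client.trigger_runs.query_by_factory(
            "exampleResourceGroup",
            "exampleFactoryName",
            RunFilterParameters(
                last_updated_after=now - datetime.timedelta(days=1),
                last_updated_before=now,
                filters=[
                    RunQueryFilter(
                        operand=RunQueryFilterOperand.TRIGGER_NAME,
                        operator=RunQueryFilterOperator.EQUALS,
                        values=["exampleTrigger"],
                    )
                ],
            ),
        )
        for run in result.value:
            print(run.trigger_run_id, run.status)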
- :type order_by: list[~data_factory_management_client.models.RunQueryOrderBy] - :keyword callable cls: A custom type or function that will be passed the direct response - :return: TriggerRunsQueryResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.TriggerRunsQueryResponse - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerRunsQueryResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - filter_parameters = models.RunFilterParameters(continuation_token=continuation_token_parameter, last_updated_after=last_updated_after, last_updated_before=last_updated_before, filters=filters, order_by=order_by) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.query_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(filter_parameters, 'RunFilterParameters') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('TriggerRunsQueryResponse', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - query_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/queryTriggerRuns'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/__init__.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/__init__.py index 1f1ab102631..d558e88e00d 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/__init__.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/__init__.py @@ -157,6 +157,9 @@ from ._models_py3 import CreateDataFlowDebugSessionResponse from ._models_py3 import CreateLinkedIntegrationRuntimeRequest from ._models_py3 import CreateRunResponse + from ._models_py3 import Credential + from ._models_py3 import CredentialReference + from ._models_py3 import CredentialResource from ._models_py3 import 
CustomActivity from ._models_py3 import CustomActivityReferenceObject from ._models_py3 import CustomDataSourceLinkedService @@ -277,6 +280,7 @@ from ._models_py3 import GetSsisObjectMetadataRequest from ._models_py3 import GitHubAccessTokenRequest from ._models_py3 import GitHubAccessTokenResponse + from ._models_py3 import GitHubClientSecret from ._models_py3 import GlobalParameterSpecification from ._models_py3 import GoogleAdWordsLinkedService from ._models_py3 import GoogleAdWordsObjectDataset @@ -336,6 +340,10 @@ from ._models_py3 import IntegrationRuntimeMonitoringData from ._models_py3 import IntegrationRuntimeNodeIpAddress from ._models_py3 import IntegrationRuntimeNodeMonitoringData + from ._models_py3 import IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint + from ._models_py3 import IntegrationRuntimeOutboundNetworkDependenciesEndpoint + from ._models_py3 import IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails + from ._models_py3 import IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse from ._models_py3 import IntegrationRuntimeReference from ._models_py3 import IntegrationRuntimeRegenerateKeyParameters from ._models_py3 import IntegrationRuntimeResource @@ -371,6 +379,7 @@ from ._models_py3 import MagentoLinkedService from ._models_py3 import MagentoObjectDataset from ._models_py3 import MagentoSource + from ._models_py3 import ManagedIdentityCredential from ._models_py3 import ManagedIntegrationRuntime from ._models_py3 import ManagedIntegrationRuntimeError from ._models_py3 import ManagedIntegrationRuntimeNode @@ -390,12 +399,14 @@ from ._models_py3 import MarketoLinkedService from ._models_py3 import MarketoObjectDataset from ._models_py3 import MarketoSource + from ._models_py3 import MetadataItem from ._models_py3 import MicrosoftAccessLinkedService from ._models_py3 import MicrosoftAccessSink from ._models_py3 import MicrosoftAccessSource from ._models_py3 import MicrosoftAccessTableDataset from ._models_py3 import MongoDbAtlasCollectionDataset from ._models_py3 import MongoDbAtlasLinkedService + from ._models_py3 import MongoDbAtlasSink from ._models_py3 import MongoDbAtlasSource from ._models_py3 import MongoDbCollectionDataset from ._models_py3 import MongoDbCursorMethodsProperties @@ -403,6 +414,7 @@ from ._models_py3 import MongoDbSource from ._models_py3 import MongoDbV2CollectionDataset from ._models_py3 import MongoDbV2LinkedService + from ._models_py3 import MongoDbV2Sink from ._models_py3 import MongoDbV2Source from ._models_py3 import MultiplePipelineTrigger from ._models_py3 import MySqlLinkedService @@ -551,6 +563,7 @@ from ._models_py3 import ServiceNowLinkedService from ._models_py3 import ServiceNowObjectDataset from ._models_py3 import ServiceNowSource + from ._models_py3 import ServicePrincipalCredential from ._models_py3 import SetVariableActivity from ._models_py3 import SftpLocation from ._models_py3 import SftpReadSettings @@ -575,6 +588,7 @@ from ._models_py3 import SqlAlwaysEncryptedProperties from ._models_py3 import SqlDwSink from ._models_py3 import SqlDwSource + from ._models_py3 import SqlDwUpsertSettings from ._models_py3 import SqlMiSink from ._models_py3 import SqlMiSource from ._models_py3 import SqlPartitionSettings @@ -585,6 +599,7 @@ from ._models_py3 import SqlServerTableDataset from ._models_py3 import SqlSink from ._models_py3 import SqlSource + from ._models_py3 import SqlUpsertSettings from ._models_py3 import SquareLinkedService from ._models_py3 import SquareObjectDataset from ._models_py3 
import SquareSource @@ -822,6 +837,9 @@ from ._models import CreateDataFlowDebugSessionResponse # type: ignore from ._models import CreateLinkedIntegrationRuntimeRequest # type: ignore from ._models import CreateRunResponse # type: ignore + from ._models import Credential # type: ignore + from ._models import CredentialReference # type: ignore + from ._models import CredentialResource # type: ignore from ._models import CustomActivity # type: ignore from ._models import CustomActivityReferenceObject # type: ignore from ._models import CustomDataSourceLinkedService # type: ignore @@ -942,6 +960,7 @@ from ._models import GetSsisObjectMetadataRequest # type: ignore from ._models import GitHubAccessTokenRequest # type: ignore from ._models import GitHubAccessTokenResponse # type: ignore + from ._models import GitHubClientSecret # type: ignore from ._models import GlobalParameterSpecification # type: ignore from ._models import GoogleAdWordsLinkedService # type: ignore from ._models import GoogleAdWordsObjectDataset # type: ignore @@ -1001,6 +1020,10 @@ from ._models import IntegrationRuntimeMonitoringData # type: ignore from ._models import IntegrationRuntimeNodeIpAddress # type: ignore from ._models import IntegrationRuntimeNodeMonitoringData # type: ignore + from ._models import IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint # type: ignore + from ._models import IntegrationRuntimeOutboundNetworkDependenciesEndpoint # type: ignore + from ._models import IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails # type: ignore + from ._models import IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse # type: ignore from ._models import IntegrationRuntimeReference # type: ignore from ._models import IntegrationRuntimeRegenerateKeyParameters # type: ignore from ._models import IntegrationRuntimeResource # type: ignore @@ -1036,6 +1059,7 @@ from ._models import MagentoLinkedService # type: ignore from ._models import MagentoObjectDataset # type: ignore from ._models import MagentoSource # type: ignore + from ._models import ManagedIdentityCredential # type: ignore from ._models import ManagedIntegrationRuntime # type: ignore from ._models import ManagedIntegrationRuntimeError # type: ignore from ._models import ManagedIntegrationRuntimeNode # type: ignore @@ -1055,12 +1079,14 @@ from ._models import MarketoLinkedService # type: ignore from ._models import MarketoObjectDataset # type: ignore from ._models import MarketoSource # type: ignore + from ._models import MetadataItem # type: ignore from ._models import MicrosoftAccessLinkedService # type: ignore from ._models import MicrosoftAccessSink # type: ignore from ._models import MicrosoftAccessSource # type: ignore from ._models import MicrosoftAccessTableDataset # type: ignore from ._models import MongoDbAtlasCollectionDataset # type: ignore from ._models import MongoDbAtlasLinkedService # type: ignore + from ._models import MongoDbAtlasSink # type: ignore from ._models import MongoDbAtlasSource # type: ignore from ._models import MongoDbCollectionDataset # type: ignore from ._models import MongoDbCursorMethodsProperties # type: ignore @@ -1068,6 +1094,7 @@ from ._models import MongoDbSource # type: ignore from ._models import MongoDbV2CollectionDataset # type: ignore from ._models import MongoDbV2LinkedService # type: ignore + from ._models import MongoDbV2Sink # type: ignore from ._models import MongoDbV2Source # type: ignore from ._models import MultiplePipelineTrigger # type: ignore from ._models import 
MySqlLinkedService # type: ignore @@ -1216,6 +1243,7 @@ from ._models import ServiceNowLinkedService # type: ignore from ._models import ServiceNowObjectDataset # type: ignore from ._models import ServiceNowSource # type: ignore + from ._models import ServicePrincipalCredential # type: ignore from ._models import SetVariableActivity # type: ignore from ._models import SftpLocation # type: ignore from ._models import SftpReadSettings # type: ignore @@ -1240,6 +1268,7 @@ from ._models import SqlAlwaysEncryptedProperties # type: ignore from ._models import SqlDwSink # type: ignore from ._models import SqlDwSource # type: ignore + from ._models import SqlDwUpsertSettings # type: ignore from ._models import SqlMiSink # type: ignore from ._models import SqlMiSource # type: ignore from ._models import SqlPartitionSettings # type: ignore @@ -1250,6 +1279,7 @@ from ._models import SqlServerTableDataset # type: ignore from ._models import SqlSink # type: ignore from ._models import SqlSource # type: ignore + from ._models import SqlUpsertSettings # type: ignore from ._models import SquareLinkedService # type: ignore from ._models import SquareObjectDataset # type: ignore from ._models import SquareSource # type: ignore @@ -1356,7 +1386,6 @@ DependencyCondition, DynamicsAuthenticationType, DynamicsDeploymentType, - DynamicsServicePrincipalCredentialType, DynamicsSinkWriteBehavior, EventSubscriptionStatus, FactoryIdentityType, @@ -1410,12 +1439,15 @@ SapTablePartitionOption, SelfHostedIntegrationRuntimeNodeStatus, ServiceNowAuthenticationType, + ServicePrincipalCredentialType, SftpAuthenticationType, SparkAuthenticationType, SparkServerType, SparkThriftTransportProtocol, SqlAlwaysEncryptedAkvAuthType, + SqlDwWriteBehaviorEnum, SqlPartitionOption, + SqlWriteBehaviorEnum, SsisLogLocationType, SsisObjectMetadataType, SsisPackageLocationType, @@ -1583,6 +1615,9 @@ 'CreateDataFlowDebugSessionResponse', 'CreateLinkedIntegrationRuntimeRequest', 'CreateRunResponse', + 'Credential', + 'CredentialReference', + 'CredentialResource', 'CustomActivity', 'CustomActivityReferenceObject', 'CustomDataSourceLinkedService', @@ -1703,6 +1738,7 @@ 'GetSsisObjectMetadataRequest', 'GitHubAccessTokenRequest', 'GitHubAccessTokenResponse', + 'GitHubClientSecret', 'GlobalParameterSpecification', 'GoogleAdWordsLinkedService', 'GoogleAdWordsObjectDataset', @@ -1762,6 +1798,10 @@ 'IntegrationRuntimeMonitoringData', 'IntegrationRuntimeNodeIpAddress', 'IntegrationRuntimeNodeMonitoringData', + 'IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint', + 'IntegrationRuntimeOutboundNetworkDependenciesEndpoint', + 'IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails', + 'IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse', 'IntegrationRuntimeReference', 'IntegrationRuntimeRegenerateKeyParameters', 'IntegrationRuntimeResource', @@ -1797,6 +1837,7 @@ 'MagentoLinkedService', 'MagentoObjectDataset', 'MagentoSource', + 'ManagedIdentityCredential', 'ManagedIntegrationRuntime', 'ManagedIntegrationRuntimeError', 'ManagedIntegrationRuntimeNode', @@ -1816,12 +1857,14 @@ 'MarketoLinkedService', 'MarketoObjectDataset', 'MarketoSource', + 'MetadataItem', 'MicrosoftAccessLinkedService', 'MicrosoftAccessSink', 'MicrosoftAccessSource', 'MicrosoftAccessTableDataset', 'MongoDbAtlasCollectionDataset', 'MongoDbAtlasLinkedService', + 'MongoDbAtlasSink', 'MongoDbAtlasSource', 'MongoDbCollectionDataset', 'MongoDbCursorMethodsProperties', @@ -1829,6 +1872,7 @@ 'MongoDbSource', 'MongoDbV2CollectionDataset', 'MongoDbV2LinkedService', + 
'MongoDbV2Sink', 'MongoDbV2Source', 'MultiplePipelineTrigger', 'MySqlLinkedService', @@ -1977,6 +2021,7 @@ 'ServiceNowLinkedService', 'ServiceNowObjectDataset', 'ServiceNowSource', + 'ServicePrincipalCredential', 'SetVariableActivity', 'SftpLocation', 'SftpReadSettings', @@ -2001,6 +2046,7 @@ 'SqlAlwaysEncryptedProperties', 'SqlDwSink', 'SqlDwSource', + 'SqlDwUpsertSettings', 'SqlMiSink', 'SqlMiSource', 'SqlPartitionSettings', @@ -2011,6 +2057,7 @@ 'SqlServerTableDataset', 'SqlSink', 'SqlSource', + 'SqlUpsertSettings', 'SquareLinkedService', 'SquareObjectDataset', 'SquareSource', @@ -2115,7 +2162,6 @@ 'DependencyCondition', 'DynamicsAuthenticationType', 'DynamicsDeploymentType', - 'DynamicsServicePrincipalCredentialType', 'DynamicsSinkWriteBehavior', 'EventSubscriptionStatus', 'FactoryIdentityType', @@ -2169,12 +2215,15 @@ 'SapTablePartitionOption', 'SelfHostedIntegrationRuntimeNodeStatus', 'ServiceNowAuthenticationType', + 'ServicePrincipalCredentialType', 'SftpAuthenticationType', 'SparkAuthenticationType', 'SparkServerType', 'SparkThriftTransportProtocol', 'SqlAlwaysEncryptedAkvAuthType', + 'SqlDwWriteBehaviorEnum', 'SqlPartitionOption', + 'SqlWriteBehaviorEnum', 'SsisLogLocationType', 'SsisObjectMetadataType', 'SsisPackageLocationType', diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_data_factory_management_client_enums.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_data_factory_management_client_enums.py index 1e1c0d92c7d..4d250610be9 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_data_factory_management_client_enums.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_data_factory_management_client_enums.py @@ -77,14 +77,16 @@ class CassandraSourceReadConsistencyLevels(with_metaclass(_CaseInsensitiveEnumMe LOCAL_SERIAL = "LOCAL_SERIAL" class CompressionCodec(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """All available compressionCodec values. + """ NONE = "none" - GZIP = "gzip" - SNAPPY = "snappy" LZO = "lzo" BZIP2 = "bzip2" + GZIP = "gzip" DEFLATE = "deflate" ZIP_DEFLATE = "zipDeflate" + SNAPPY = "snappy" LZ4 = "lz4" TAR = "tar" TAR_G_ZIP = "tarGZip" @@ -174,9 +176,7 @@ class DependencyCondition(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): COMPLETED = "Completed" class DynamicsAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - """The authentication type to connect to Dynamics server. 'Office365' for online scenario, 'Ifd' - for on-premises with Ifd scenario, 'AADServicePrincipal' for Server-To-Server authentication in - online scenario. Type: string (or Expression with resultType string). + """All available dynamicsAuthenticationType values. """ OFFICE365 = "Office365" @@ -184,23 +184,12 @@ class DynamicsAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, E AAD_SERVICE_PRINCIPAL = "AADServicePrincipal" class DynamicsDeploymentType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - """The deployment type of the Dynamics instance. 'Online' for Dynamics Online and - 'OnPremisesWithIfd' for Dynamics on-premises with Ifd. Type: string (or Expression with - resultType string). + """All available dynamicsDeploymentType values. """ ONLINE = "Online" ON_PREMISES_WITH_IFD = "OnPremisesWithIfd" -class DynamicsServicePrincipalCredentialType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - """The service principal credential type to use in Server-To-Server authentication. 
- 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. Type: string (or - Expression with resultType string). - """ - - SERVICE_PRINCIPAL_KEY = "ServicePrincipalKey" - SERVICE_PRINCIPAL_CERT = "ServicePrincipalCert" - class DynamicsSinkWriteBehavior(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Defines values for DynamicsSinkWriteBehavior. """ @@ -267,7 +256,7 @@ class HBaseAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum BASIC = "Basic" class HdiNodeTypes(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - """The node types on which the script action should be executed. + """All available HdiNodeTypes values. """ HEADNODE = "Headnode" @@ -417,8 +406,7 @@ class JsonFormatFilePattern(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)) ARRAY_OF_OBJECTS = "arrayOfObjects" class JsonWriteFilePattern(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - """File pattern of JSON. This setting controls the way a collection of JSON objects will be - treated. The default value is 'setOfObjects'. It is case-sensitive. + """All available filePatterns. """ SET_OF_OBJECTS = "setOfObjects" @@ -661,6 +649,13 @@ class ServiceNowAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, BASIC = "Basic" O_AUTH2 = "OAuth2" +class ServicePrincipalCredentialType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """All available servicePrincipalCredentialType values. + """ + + SERVICE_PRINCIPAL_KEY = "ServicePrincipalKey" + SERVICE_PRINCIPAL_CERT = "ServicePrincipalCert" + class SftpAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The authentication type to be used to connect to the FTP server. """ @@ -702,6 +697,13 @@ class SqlAlwaysEncryptedAkvAuthType(with_metaclass(_CaseInsensitiveEnumMeta, str SERVICE_PRINCIPAL = "ServicePrincipal" MANAGED_IDENTITY = "ManagedIdentity" +class SqlDwWriteBehaviorEnum(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Specify the write behavior when copying data into sql dw. + """ + + INSERT = "Insert" + UPSERT = "Upsert" + class SqlPartitionOption(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The partition mechanism that will be used for Sql read in parallel. """ @@ -710,6 +712,14 @@ class SqlPartitionOption(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): PHYSICAL_PARTITIONS_OF_TABLE = "PhysicalPartitionsOfTable" DYNAMIC_RANGE = "DynamicRange" +class SqlWriteBehaviorEnum(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Specify the write behavior when copying data into sql. + """ + + INSERT = "Insert" + UPSERT = "Upsert" + STORED_PROCEDURE = "StoredProcedure" + class SsisLogLocationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The type of SSIS log location. """ diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models.py index e97fd0ab305..fb43215b43c 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models.py @@ -13,7 +13,7 @@ class AccessPolicyResponse(msrest.serialization.Model): """Get Data Plane read only token response definition. :param policy: The user access policy. - :type policy: ~data_factory_management_client.models.UserAccessPolicy + :type policy: ~azure.mgmt.datafactory.models.UserAccessPolicy :param access_token: Data Plane read only access token. 
:type access_token: str :param data_plane_url: Data Plane service base URL. @@ -54,9 +54,9 @@ class Activity(msrest.serialization.Model): :param description: Activity description. :type description: str :param depends_on: Activity depends on condition. - :type depends_on: list[~data_factory_management_client.models.ActivityDependency] + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. - :type user_properties: list[~data_factory_management_client.models.UserProperty] + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] """ _validation = { @@ -101,8 +101,7 @@ class ActivityDependency(msrest.serialization.Model): :param activity: Required. Activity name. :type activity: str :param dependency_conditions: Required. Match-Condition for the dependency. - :type dependency_conditions: list[str or - ~data_factory_management_client.models.DependencyCondition] + :type dependency_conditions: list[str or ~azure.mgmt.datafactory.models.DependencyCondition] """ _validation = { @@ -272,7 +271,7 @@ class ActivityRunsQueryResponse(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. :param value: Required. List of activity runs. - :type value: list[~data_factory_management_client.models.ActivityRun] + :type value: list[~azure.mgmt.datafactory.models.ActivityRun] :param continuation_token: The continuation token for getting the next page of results, if any remaining results exist, null otherwise. :type continuation_token: str @@ -352,11 +351,11 @@ class LinkedService(msrest.serialization.Model): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] """ @@ -402,11 +401,11 @@ class AmazonMwsLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param endpoint: Required. The endpoint of the Amazon MWS server, (i.e. @@ -419,11 +418,11 @@ class AmazonMwsLinkedService(LinkedService): :param seller_id: Required. The Amazon seller ID. :type seller_id: object :param mws_auth_token: The Amazon MWS authentication token. 
- :type mws_auth_token: ~data_factory_management_client.models.SecretBase + :type mws_auth_token: ~azure.mgmt.datafactory.models.SecretBase :param access_key_id: Required. The access key id used to access data. :type access_key_id: object :param secret_key: The secret key used to access data. - :type secret_key: ~data_factory_management_client.models.SecretBase + :type secret_key: ~azure.mgmt.datafactory.models.SecretBase :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. :type use_encrypted_endpoints: object @@ -507,14 +506,14 @@ class Dataset(msrest.serialization.Model): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder """ _validation = { @@ -573,14 +572,14 @@ class AmazonMwsObjectDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The table name. Type: string (or Expression with resultType string). :type table_name: object """ @@ -634,6 +633,9 @@ class CopySource(msrest.serialization.Model): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). 
+ :type disable_metrics_collection: object """ _validation = { @@ -646,6 +648,7 @@ class CopySource(msrest.serialization.Model): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, } _subtype_map = { @@ -662,6 +665,7 @@ def __init__( self.source_retry_count = kwargs.get('source_retry_count', None) self.source_retry_wait = kwargs.get('source_retry_wait', None) self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None) + self.disable_metrics_collection = kwargs.get('disable_metrics_collection', None) class TabularSource(CopySource): @@ -686,12 +690,15 @@ class TabularSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object """ _validation = { @@ -704,8 +711,9 @@ class TabularSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } _subtype_map = { @@ -741,12 +749,15 @@ class AmazonMwsSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. 
Type: string (or Expression with resultType string). :type query: object @@ -762,8 +773,9 @@ class AmazonMwsSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -787,11 +799,11 @@ class AmazonRedshiftLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param server: Required. The name of the Amazon Redshift server. Type: string (or Expression @@ -801,7 +813,7 @@ class AmazonRedshiftLinkedService(LinkedService): resultType string). :type username: object :param password: The password of the Amazon Redshift source. - :type password: ~data_factory_management_client.models.SecretBase + :type password: ~azure.mgmt.datafactory.models.SecretBase :param database: Required. The database name of the Amazon Redshift source. Type: string (or Expression with resultType string). :type database: object @@ -868,18 +880,21 @@ class AmazonRedshiftSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: Database query. Type: string (or Expression with resultType string). :type query: object :param redshift_unload_settings: The Amazon S3 settings needed for the interim Amazon S3 when copying from Amazon Redshift with unload. With this, data from Amazon Redshift source will be unloaded into S3 first and then copied into the targeted sink from the interim S3. 
- :type redshift_unload_settings: ~data_factory_management_client.models.RedshiftUnloadSettings + :type redshift_unload_settings: ~azure.mgmt.datafactory.models.RedshiftUnloadSettings """ _validation = { @@ -892,8 +907,9 @@ class AmazonRedshiftSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'redshift_unload_settings': {'key': 'redshiftUnloadSettings', 'type': 'RedshiftUnloadSettings'}, } @@ -927,14 +943,14 @@ class AmazonRedshiftTableDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: This property will be retired. Please consider using schema + table properties instead. :type table_name: object @@ -988,11 +1004,11 @@ class AmazonS3CompatibleLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param access_key_id: The access key identifier of the Amazon S3 Compatible Identity and Access @@ -1000,7 +1016,7 @@ class AmazonS3CompatibleLinkedService(LinkedService): :type access_key_id: object :param secret_access_key: The secret access key of the Amazon S3 Compatible Identity and Access Management (IAM) user. - :type secret_access_key: ~data_factory_management_client.models.SecretBase + :type secret_access_key: ~azure.mgmt.datafactory.models.SecretBase :param service_url: This value specifies the endpoint to access with the Amazon S3 Compatible Connector. This is an optional property; change it only if you want to try a different service endpoint or want to switch between https and http. 
Type: string (or Expression with resultType @@ -1156,6 +1172,9 @@ class StoreReadSettings(msrest.serialization.Model): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object """ _validation = { @@ -1166,6 +1185,7 @@ class StoreReadSettings(msrest.serialization.Model): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, } _subtype_map = { @@ -1180,6 +1200,7 @@ def __init__( self.additional_properties = kwargs.get('additional_properties', None) self.type = 'StoreReadSettings' # type: str self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None) + self.disable_metrics_collection = kwargs.get('disable_metrics_collection', None) class AmazonS3CompatibleReadSettings(StoreReadSettings): @@ -1195,6 +1216,9 @@ class AmazonS3CompatibleReadSettings(StoreReadSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). :type recursive: object @@ -1235,6 +1259,7 @@ class AmazonS3CompatibleReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -1284,14 +1309,14 @@ class AmazonS3Dataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param bucket_name: Required. The name of the Amazon S3 bucket. Type: string (or Expression with resultType string). 
:type bucket_name: object @@ -1311,9 +1336,9 @@ class AmazonS3Dataset(Dataset): Expression with resultType string). :type modified_datetime_end: object :param format: The format of files. - :type format: ~data_factory_management_client.models.DatasetStorageFormat + :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat :param compression: The data compression method used for the Amazon S3 object. - :type compression: ~data_factory_management_client.models.DatasetCompression + :type compression: ~azure.mgmt.datafactory.models.DatasetCompression """ _validation = { @@ -1369,11 +1394,11 @@ class AmazonS3LinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param authentication_type: The authentication type of S3. Allowed value: AccessKey (default) @@ -1384,13 +1409,13 @@ class AmazonS3LinkedService(LinkedService): :type access_key_id: object :param secret_access_key: The secret access key of the Amazon S3 Identity and Access Management (IAM) user. - :type secret_access_key: ~data_factory_management_client.models.SecretBase + :type secret_access_key: ~azure.mgmt.datafactory.models.SecretBase :param service_url: This value specifies the endpoint to access with the S3 Connector. This is an optional property; change it only if you want to try a different service endpoint or want to switch between https and http. Type: string (or Expression with resultType string). :type service_url: object :param session_token: The session token for the S3 temporary security credential. - :type session_token: ~data_factory_management_client.models.SecretBase + :type session_token: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -1490,6 +1515,9 @@ class AmazonS3ReadSettings(StoreReadSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). 
:type recursive: object @@ -1530,6 +1558,7 @@ class AmazonS3ReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -1575,9 +1604,9 @@ class AppendVariableActivity(Activity): :param description: Activity description. :type description: str :param depends_on: Activity depends on condition. - :type depends_on: list[~data_factory_management_client.models.ActivityDependency] + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. - :type user_properties: list[~data_factory_management_client.models.UserProperty] + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] :param variable_name: Name of the variable whose value needs to be appended to. :type variable_name: str :param value: Value to be appended. Could be a static value or Expression. @@ -1654,20 +1683,19 @@ class AvroDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param location: The location of the avro storage. - :type location: ~data_factory_management_client.models.DatasetLocation - :param avro_compression_codec: Possible values include: "none", "deflate", "snappy", "xz", - "bzip2". - :type avro_compression_codec: str or - ~data_factory_management_client.models.AvroCompressionCodec + :type location: ~azure.mgmt.datafactory.models.DatasetLocation + :param avro_compression_codec: The data avroCompressionCodec. Type: string (or Expression with + resultType string). + :type avro_compression_codec: object :param avro_compression_level: :type avro_compression_level: int """ @@ -1689,7 +1717,7 @@ class AvroDataset(Dataset): 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, - 'avro_compression_codec': {'key': 'typeProperties.avroCompressionCodec', 'type': 'str'}, + 'avro_compression_codec': {'key': 'typeProperties.avroCompressionCodec', 'type': 'object'}, 'avro_compression_level': {'key': 'typeProperties.avroCompressionLevel', 'type': 'int'}, } @@ -1788,7 +1816,7 @@ class CopySink(msrest.serialization.Model): """A copy activity sink. You probably want to use the sub-classes and not this class directly. 
Known - sub-classes are: AvroSink, AzureBlobFsSink, AzureDataExplorerSink, AzureDataLakeStoreSink, AzureDatabricksDeltaLakeSink, AzureMySqlSink, AzurePostgreSqlSink, AzureQueueSink, AzureSearchIndexSink, AzureSqlSink, AzureTableSink, BinarySink, BlobSink, CommonDataServiceForAppsSink, CosmosDbMongoDbApiSink, CosmosDbSqlApiSink, DelimitedTextSink, DocumentDbCollectionSink, DynamicsCrmSink, DynamicsSink, FileSystemSink, InformixSink, JsonSink, MicrosoftAccessSink, OdbcSink, OracleSink, OrcSink, ParquetSink, RestSink, SalesforceServiceCloudSink, SalesforceSink, SapCloudForCustomerSink, SnowflakeSink, SqlDwSink, SqlMiSink, SqlServerSink, SqlSink. + sub-classes are: AvroSink, AzureBlobFsSink, AzureDataExplorerSink, AzureDataLakeStoreSink, AzureDatabricksDeltaLakeSink, AzureMySqlSink, AzurePostgreSqlSink, AzureQueueSink, AzureSearchIndexSink, AzureSqlSink, AzureTableSink, BinarySink, BlobSink, CommonDataServiceForAppsSink, CosmosDbMongoDbApiSink, CosmosDbSqlApiSink, DelimitedTextSink, DocumentDbCollectionSink, DynamicsCrmSink, DynamicsSink, FileSystemSink, InformixSink, JsonSink, MicrosoftAccessSink, MongoDbAtlasSink, MongoDbV2Sink, OdbcSink, OracleSink, OrcSink, ParquetSink, RestSink, SalesforceServiceCloudSink, SalesforceSink, SapCloudForCustomerSink, SnowflakeSink, SqlDwSink, SqlMiSink, SqlServerSink, SqlSink. All required parameters must be populated in order to send to Azure. @@ -1812,6 +1840,9 @@ class CopySink(msrest.serialization.Model): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). 
+ :type disable_metrics_collection: object """ _validation = { @@ -1826,10 +1857,11 @@ class CopySink(msrest.serialization.Model): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, } _subtype_map = { - 'type': {'AvroSink': 'AvroSink', 'AzureBlobFSSink': 'AzureBlobFsSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'AzureDatabricksDeltaLakeSink': 'AzureDatabricksDeltaLakeSink', 'AzureMySqlSink': 'AzureMySqlSink', 'AzurePostgreSqlSink': 'AzurePostgreSqlSink', 'AzureQueueSink': 'AzureQueueSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureSqlSink': 'AzureSqlSink', 'AzureTableSink': 'AzureTableSink', 'BinarySink': 'BinarySink', 'BlobSink': 'BlobSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'CosmosDbSqlApiSink': 'CosmosDbSqlApiSink', 'DelimitedTextSink': 'DelimitedTextSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'FileSystemSink': 'FileSystemSink', 'InformixSink': 'InformixSink', 'JsonSink': 'JsonSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'OdbcSink': 'OdbcSink', 'OracleSink': 'OracleSink', 'OrcSink': 'OrcSink', 'ParquetSink': 'ParquetSink', 'RestSink': 'RestSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'SnowflakeSink': 'SnowflakeSink', 'SqlDWSink': 'SqlDwSink', 'SqlMISink': 'SqlMiSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink'} + 'type': {'AvroSink': 'AvroSink', 'AzureBlobFSSink': 'AzureBlobFsSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'AzureDatabricksDeltaLakeSink': 'AzureDatabricksDeltaLakeSink', 'AzureMySqlSink': 'AzureMySqlSink', 'AzurePostgreSqlSink': 'AzurePostgreSqlSink', 'AzureQueueSink': 'AzureQueueSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureSqlSink': 'AzureSqlSink', 'AzureTableSink': 'AzureTableSink', 'BinarySink': 'BinarySink', 'BlobSink': 'BlobSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'CosmosDbSqlApiSink': 'CosmosDbSqlApiSink', 'DelimitedTextSink': 'DelimitedTextSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'FileSystemSink': 'FileSystemSink', 'InformixSink': 'InformixSink', 'JsonSink': 'JsonSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'MongoDbAtlasSink': 'MongoDbAtlasSink', 'MongoDbV2Sink': 'MongoDbV2Sink', 'OdbcSink': 'OdbcSink', 'OracleSink': 'OracleSink', 'OrcSink': 'OrcSink', 'ParquetSink': 'ParquetSink', 'RestSink': 'RestSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'SnowflakeSink': 'SnowflakeSink', 'SqlDWSink': 'SqlDwSink', 'SqlMISink': 'SqlMiSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink'} } def __init__( @@ -1844,6 +1876,7 @@ def __init__( self.sink_retry_count = kwargs.get('sink_retry_count', None) self.sink_retry_wait = kwargs.get('sink_retry_wait', None) self.max_concurrent_connections = 
kwargs.get('max_concurrent_connections', None) + self.disable_metrics_collection = kwargs.get('disable_metrics_collection', None) class AvroSink(CopySink): @@ -1871,10 +1904,13 @@ class AvroSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param store_settings: Avro store settings. - :type store_settings: ~data_factory_management_client.models.StoreWriteSettings + :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings :param format_settings: Avro format settings. - :type format_settings: ~data_factory_management_client.models.AvroWriteSettings + :type format_settings: ~azure.mgmt.datafactory.models.AvroWriteSettings """ _validation = { @@ -1889,6 +1925,7 @@ class AvroSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, 'format_settings': {'key': 'formatSettings', 'type': 'AvroWriteSettings'}, } @@ -1922,11 +1959,14 @@ class AvroSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param store_settings: Avro store settings. - :type store_settings: ~data_factory_management_client.models.StoreReadSettings + :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object """ _validation = { @@ -1939,8 +1979,9 @@ class AvroSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -2111,18 +2152,18 @@ class AzureBatchLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. 
- :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param account_name: Required. The Azure Batch account name. Type: string (or Expression with resultType string). :type account_name: object :param access_key: The Azure Batch account access key. - :type access_key: ~data_factory_management_client.models.SecretBase + :type access_key: ~azure.mgmt.datafactory.models.SecretBase :param batch_uri: Required. The Azure Batch URI. Type: string (or Expression with resultType string). :type batch_uri: object @@ -2130,11 +2171,13 @@ class AzureBatchLinkedService(LinkedService): resultType string). :type pool_name: object :param linked_service_name: Required. The Azure Storage linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). :type encrypted_credential: object + :param credential: The credential reference containing authentication information. + :type credential: ~azure.mgmt.datafactory.models.CredentialReference """ _validation = { @@ -2158,6 +2201,7 @@ class AzureBatchLinkedService(LinkedService): 'pool_name': {'key': 'typeProperties.poolName', 'type': 'object'}, 'linked_service_name': {'key': 'typeProperties.linkedServiceName', 'type': 'LinkedServiceReference'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'}, } def __init__( @@ -2172,6 +2216,7 @@ def __init__( self.pool_name = kwargs['pool_name'] self.linked_service_name = kwargs['linked_service_name'] self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.credential = kwargs.get('credential', None) class AzureBlobDataset(Dataset): @@ -2193,14 +2238,14 @@ class AzureBlobDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param folder_path: The path of the Azure Blob storage. 
Type: string (or Expression with resultType string). :type folder_path: object @@ -2217,9 +2262,9 @@ class AzureBlobDataset(Dataset): Expression with resultType string). :type modified_datetime_end: object :param format: The format of the Azure Blob storage. - :type format: ~data_factory_management_client.models.DatasetStorageFormat + :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat :param compression: The data compression method used for the blob storage. - :type compression: ~data_factory_management_client.models.DatasetCompression + :type compression: ~azure.mgmt.datafactory.models.DatasetCompression """ _validation = { @@ -2280,14 +2325,14 @@ class AzureBlobFsDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param folder_path: The path of the Azure Data Lake Storage Gen2 storage. Type: string (or Expression with resultType string). :type folder_path: object @@ -2295,9 +2340,9 @@ class AzureBlobFsDataset(Dataset): with resultType string). :type file_name: object :param format: The format of the Azure Data Lake Storage Gen2 storage. - :type format: ~data_factory_management_client.models.DatasetStorageFormat + :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat :param compression: The data compression method used for the blob storage. - :type compression: ~data_factory_management_client.models.DatasetCompression + :type compression: ~azure.mgmt.datafactory.models.DatasetCompression """ _validation = { @@ -2344,11 +2389,11 @@ class AzureBlobFsLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param url: Required. Endpoint for the Azure Data Lake Storage Gen2 service. Type: string (or @@ -2362,7 +2407,7 @@ class AzureBlobFsLinkedService(LinkedService): :type service_principal_id: object :param service_principal_key: The Key of the application used to authenticate against the Azure Data Lake Storage Gen2 account. 
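The credential property being added to the linked services above points at a factory-level credential resource. A hedged sketch, assuming CredentialReference is exposed alongside the other models named in the :type annotations; the URL and reference name are illustrative:

from azure.mgmt.datafactory.models import AzureBlobFsLinkedService, CredentialReference

ls = AzureBlobFsLinkedService(
    url="https://examplestore.dfs.core.windows.net",  # required ADLS Gen2 endpoint (illustrative)
    credential=CredentialReference(reference_name="exampleCredential"),  # new property
)
# Serialized under 'typeProperties.credential' per the _attribute_map entries above.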
- :type service_principal_key: ~data_factory_management_client.models.SecretBase + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :param tenant: The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). :type tenant: object @@ -2374,6 +2419,8 @@ class AzureBlobFsLinkedService(LinkedService): encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). :type encrypted_credential: object + :param credential: The credential reference containing authentication information. + :type credential: ~azure.mgmt.datafactory.models.CredentialReference """ _validation = { @@ -2395,6 +2442,7 @@ class AzureBlobFsLinkedService(LinkedService): 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'}, } def __init__( @@ -2410,6 +2458,7 @@ def __init__( self.tenant = kwargs.get('tenant', None) self.azure_cloud_type = kwargs.get('azure_cloud_type', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.credential = kwargs.get('credential', None) class AzureBlobFsLocation(DatasetLocation): @@ -2467,6 +2516,9 @@ class AzureBlobFsReadSettings(StoreReadSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). :type recursive: object @@ -2504,6 +2556,7 @@ class AzureBlobFsReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -2557,8 +2610,14 @@ class AzureBlobFsSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param copy_behavior: The type of copy behavior for copy sink. :type copy_behavior: object + :param metadata: Specify the custom metadata to be added to sink data. Type: array of objects + (or Expression with resultType array of objects). 
+ :type metadata: list[~azure.mgmt.datafactory.models.MetadataItem] """ _validation = { @@ -2573,7 +2632,9 @@ class AzureBlobFsSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + 'metadata': {'key': 'metadata', 'type': '[MetadataItem]'}, } def __init__( @@ -2583,6 +2644,7 @@ def __init__( super(AzureBlobFsSink, self).__init__(**kwargs) self.type = 'AzureBlobFSSink' # type: str self.copy_behavior = kwargs.get('copy_behavior', None) + self.metadata = kwargs.get('metadata', None) class AzureBlobFsSource(CopySource): @@ -2604,6 +2666,9 @@ class AzureBlobFsSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param treat_empty_as_null: Treat empty as null. Type: boolean (or Expression with resultType boolean). :type treat_empty_as_null: object @@ -2625,6 +2690,7 @@ class AzureBlobFsSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'}, 'skip_header_line_count': {'key': 'skipHeaderLineCount', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, @@ -2657,6 +2723,9 @@ class StoreWriteSettings(msrest.serialization.Model): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param copy_behavior: The type of copy behavior for copy sink. :type copy_behavior: object """ @@ -2669,6 +2738,7 @@ class StoreWriteSettings(msrest.serialization.Model): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, } @@ -2684,6 +2754,7 @@ def __init__( self.additional_properties = kwargs.get('additional_properties', None) self.type = 'StoreWriteSettings' # type: str self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None) + self.disable_metrics_collection = kwargs.get('disable_metrics_collection', None) self.copy_behavior = kwargs.get('copy_behavior', None) @@ -2700,6 +2771,9 @@ class AzureBlobFsWriteSettings(StoreWriteSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. 
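Besides the metrics flag, AzureBlobFsSink gains a metadata list in this diff. A sketch of stamping custom metadata onto written files, assuming MetadataItem carries name/value pairs as the referenced models indicate; the values are illustrative:

from azure.mgmt.datafactory.models import AzureBlobFsSink, MetadataItem

sink = AzureBlobFsSink(
    copy_behavior="PreserveHierarchy",
    metadata=[  # new property, serialized under the 'metadata' wire key
        MetadataItem(name="project", value="adf-demo"),
        MetadataItem(name="owner", value="data-team"),
    ],
)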
Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param copy_behavior: The type of copy behavior for copy sink. :type copy_behavior: object :param block_size_in_mb: Indicates the block size(MB) when writing data to blob. Type: integer @@ -2715,6 +2789,7 @@ class AzureBlobFsWriteSettings(StoreWriteSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, 'block_size_in_mb': {'key': 'blockSizeInMB', 'type': 'object'}, } @@ -2739,24 +2814,24 @@ class AzureBlobStorageLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param connection_string: The connection string. It is mutually exclusive with sasUri, serviceEndpoint property. Type: string, SecureString or AzureKeyVaultSecretReference. :type connection_string: object :param account_key: The Azure key vault secret reference of accountKey in connection string. - :type account_key: ~data_factory_management_client.models.AzureKeyVaultSecretReference + :type account_key: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param sas_uri: SAS URI of the Azure Blob Storage resource. It is mutually exclusive with connectionString, serviceEndpoint property. Type: string, SecureString or AzureKeyVaultSecretReference. :type sas_uri: object :param sas_token: The Azure key vault secret reference of sasToken in sas uri. - :type sas_token: ~data_factory_management_client.models.AzureKeyVaultSecretReference + :type sas_token: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param service_endpoint: Blob service endpoint of the Azure Blob Storage resource. It is mutually exclusive with connectionString, sasUri property. :type service_endpoint: str @@ -2765,7 +2840,7 @@ class AzureBlobStorageLinkedService(LinkedService): :type service_principal_id: object :param service_principal_key: The key of the service principal used to authenticate against Azure SQL Data Warehouse. - :type service_principal_key: ~data_factory_management_client.models.SecretBase + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :param tenant: The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). :type tenant: object @@ -2781,6 +2856,8 @@ class AzureBlobStorageLinkedService(LinkedService): encrypted using the integration runtime credential manager. 
Type: string (or Expression with resultType string). :type encrypted_credential: str + :param credential: The credential reference containing authentication information. + :type credential: ~azure.mgmt.datafactory.models.CredentialReference """ _validation = { @@ -2805,6 +2882,7 @@ class AzureBlobStorageLinkedService(LinkedService): 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, 'account_kind': {'key': 'typeProperties.accountKind', 'type': 'str'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, + 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'}, } def __init__( @@ -2824,6 +2902,7 @@ def __init__( self.azure_cloud_type = kwargs.get('azure_cloud_type', None) self.account_kind = kwargs.get('account_kind', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.credential = kwargs.get('credential', None) class AzureBlobStorageLocation(DatasetLocation): @@ -2881,6 +2960,9 @@ class AzureBlobStorageReadSettings(StoreReadSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). :type recursive: object @@ -2921,6 +3003,7 @@ class AzureBlobStorageReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -2964,6 +3047,9 @@ class AzureBlobStorageWriteSettings(StoreWriteSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param copy_behavior: The type of copy behavior for copy sink. :type copy_behavior: object :param block_size_in_mb: Indicates the block size(MB) when writing data to blob. Type: integer @@ -2979,6 +3065,7 @@ class AzureBlobStorageWriteSettings(StoreWriteSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, 'block_size_in_mb': {'key': 'blockSizeInMB', 'type': 'object'}, } @@ -3011,14 +3098,14 @@ class AzureDatabricksDeltaLakeDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. 
Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table: The name of delta table. Type: string (or Expression with resultType string). :type table: object :param database: The database name of delta table. Type: string (or Expression with resultType @@ -3218,11 +3305,11 @@ class AzureDatabricksDeltaLakeLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param domain: Required. :code:``.azuredatabricks.net, domain name of your Databricks @@ -3231,7 +3318,7 @@ class AzureDatabricksDeltaLakeLinkedService(LinkedService): :param access_token: Access token for databricks REST API. Refer to https://docs.azuredatabricks.net/api/latest/authentication.html. Type: string, SecureString or AzureKeyVaultSecretReference. - :type access_token: ~data_factory_management_client.models.SecretBase + :type access_token: ~azure.mgmt.datafactory.models.SecretBase :param cluster_id: The id of an existing interactive cluster that will be used for all runs of this job. Type: string (or Expression with resultType string). :type cluster_id: object @@ -3296,12 +3383,14 @@ class AzureDatabricksDeltaLakeSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType string). :type pre_copy_script: object :param import_settings: Azure Databricks Delta Lake import settings. 
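A sketch of the Delta Lake copy sink wired up with the new metrics flag and its import command, assuming the AzureDatabricksDeltaLakeImportCommand signature implied by the serialization maps; the script and date format are illustrative:

from azure.mgmt.datafactory.models import (
    AzureDatabricksDeltaLakeImportCommand,
    AzureDatabricksDeltaLakeSink,
)

sink = AzureDatabricksDeltaLakeSink(
    pre_copy_script="TRUNCATE TABLE staging_events",  # illustrative SQL pre-copy script
    disable_metrics_collection=False,
    import_settings=AzureDatabricksDeltaLakeImportCommand(date_format="yyyy-MM-dd"),
)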
- :type import_settings: - ~data_factory_management_client.models.AzureDatabricksDeltaLakeImportCommand + :type import_settings: ~azure.mgmt.datafactory.models.AzureDatabricksDeltaLakeImportCommand """ _validation = { @@ -3316,6 +3405,7 @@ class AzureDatabricksDeltaLakeSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, 'import_settings': {'key': 'importSettings', 'type': 'AzureDatabricksDeltaLakeImportCommand'}, } @@ -3349,12 +3439,14 @@ class AzureDatabricksDeltaLakeSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query: Azure Databricks Delta Lake Sql query. Type: string (or Expression with resultType string). :type query: object :param export_settings: Azure Databricks Delta Lake export settings. - :type export_settings: - ~data_factory_management_client.models.AzureDatabricksDeltaLakeExportCommand + :type export_settings: ~azure.mgmt.datafactory.models.AzureDatabricksDeltaLakeExportCommand """ _validation = { @@ -3367,6 +3459,7 @@ class AzureDatabricksDeltaLakeSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'export_settings': {'key': 'exportSettings', 'type': 'AzureDatabricksDeltaLakeExportCommand'}, } @@ -3392,11 +3485,11 @@ class AzureDatabricksLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param domain: Required. :code:``.azuredatabricks.net, domain name of your Databricks @@ -3405,7 +3498,7 @@ class AzureDatabricksLinkedService(LinkedService): :param access_token: Access token for databricks REST API. Refer to https://docs.azuredatabricks.net/api/latest/authentication.html. Type: string (or Expression with resultType string). - :type access_token: ~data_factory_management_client.models.SecretBase + :type access_token: ~azure.mgmt.datafactory.models.SecretBase :param authentication: Required to specify MSI, if using Workspace resource id for databricks REST API. 
Type: string (or Expression with resultType string). :type authentication: object @@ -3464,6 +3557,8 @@ class AzureDatabricksLinkedService(LinkedService): :param policy_id: The policy id for limiting the ability to configure clusters based on a user defined set of rules. Type: string (or Expression with resultType string). :type policy_id: object + :param credential: The credential reference containing authentication information. + :type credential: ~azure.mgmt.datafactory.models.CredentialReference """ _validation = { @@ -3496,6 +3591,7 @@ class AzureDatabricksLinkedService(LinkedService): 'new_cluster_enable_elastic_disk': {'key': 'typeProperties.newClusterEnableElasticDisk', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, 'policy_id': {'key': 'typeProperties.policyId', 'type': 'object'}, + 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'}, } def __init__( @@ -3522,6 +3618,7 @@ def __init__( self.new_cluster_enable_elastic_disk = kwargs.get('new_cluster_enable_elastic_disk', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) self.policy_id = kwargs.get('policy_id', None) + self.credential = kwargs.get('credential', None) class ExecutionActivity(Activity): @@ -3542,13 +3639,13 @@ class ExecutionActivity(Activity): :param description: Activity description. :type description: str :param depends_on: Activity depends on condition. - :type depends_on: list[~data_factory_management_client.models.ActivityDependency] + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. - :type user_properties: list[~data_factory_management_client.models.UserProperty] + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] :param linked_service_name: Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param policy: Activity policy. - :type policy: ~data_factory_management_client.models.ActivityPolicy + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy """ _validation = { @@ -3596,13 +3693,13 @@ class AzureDataExplorerCommandActivity(ExecutionActivity): :param description: Activity description. :type description: str :param depends_on: Activity depends on condition. - :type depends_on: list[~data_factory_management_client.models.ActivityDependency] + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. - :type user_properties: list[~data_factory_management_client.models.UserProperty] + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] :param linked_service_name: Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param policy: Activity policy. - :type policy: ~data_factory_management_client.models.ActivityPolicy + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy :param command: Required. A control command, according to the Azure Data Explorer command syntax. Type: string (or Expression with resultType string). :type command: object @@ -3651,11 +3748,11 @@ class AzureDataExplorerLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. 
:type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param endpoint: Required. The endpoint of Azure Data Explorer (the engine's endpoint). URL @@ -3667,13 +3764,15 @@ class AzureDataExplorerLinkedService(LinkedService): :type service_principal_id: object :param service_principal_key: The key of the service principal used to authenticate against Kusto. - :type service_principal_key: ~data_factory_management_client.models.SecretBase + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :param database: Required. Database name for connection. Type: string (or Expression with resultType string). :type database: object :param tenant: The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). :type tenant: object + :param credential: The credential reference containing authentication information. + :type credential: ~azure.mgmt.datafactory.models.CredentialReference """ _validation = { @@ -3694,6 +3793,7 @@ class AzureDataExplorerLinkedService(LinkedService): 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, 'database': {'key': 'typeProperties.database', 'type': 'object'}, 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'}, } def __init__( @@ -3707,6 +3807,7 @@ def __init__( self.service_principal_key = kwargs.get('service_principal_key', None) self.database = kwargs['database'] self.tenant = kwargs.get('tenant', None) + self.credential = kwargs.get('credential', None) class AzureDataExplorerSink(CopySink): @@ -3734,6 +3835,9 @@ class AzureDataExplorerSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param ingestion_mapping_name: A name of a pre-created csv mapping that was defined on the target Kusto table. Type: string. 
:type ingestion_mapping_name: object @@ -3757,6 +3861,7 @@ 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'ingestion_mapping_name': {'key': 'ingestionMappingName', 'type': 'object'}, 'ingestion_mapping_as_json': {'key': 'ingestionMappingAsJson', 'type': 'object'}, 'flush_immediately': {'key': 'flushImmediately', 'type': 'object'}, @@ -3792,6 +3897,9 @@ class AzureDataExplorerSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query: Required. Database query. Should be a Kusto Query Language (KQL) query. Type: string (or Expression with resultType string). :type query: object @@ -3802,8 +3910,8 @@ class AzureDataExplorerSource(CopySource): pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects (AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object """ _validation = { @@ -3817,10 +3925,11 @@ 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'no_truncation': {'key': 'noTruncation', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -3854,14 +3963,14 @@ class AzureDataExplorerTableDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table: The table name of the Azure Data Explorer database.
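Because additionalColumns is now typed as a bare object rather than list[AdditionalColumns], callers can pass either literal name/value objects or a whole pipeline expression. A hedged sketch against AzureDataExplorerSource; the query and column values are illustrative:

from azure.mgmt.datafactory.models import AzureDataExplorerSource

src = AzureDataExplorerSource(
    query="StormEvents | take 100",  # required KQL query (illustrative)
    additional_columns=[{"name": "ingestSource", "value": "kusto"}],  # plain objects now pass through
)
# ...or hand the whole property to a pipeline expression:
src.additional_columns = {"type": "Expression", "value": "@pipeline().parameters.extraColumns"}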
Type: string (or Expression with resultType string). :type table: object @@ -3905,11 +4014,11 @@ class AzureDataLakeAnalyticsLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param account_name: Required. The Azure Data Lake Analytics account name. Type: string (or @@ -3920,7 +4029,7 @@ class AzureDataLakeAnalyticsLinkedService(LinkedService): :type service_principal_id: object :param service_principal_key: The Key of the application used to authenticate against the Azure Data Lake Analytics account. - :type service_principal_key: ~data_factory_management_client.models.SecretBase + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :param tenant: Required. The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). :type tenant: object @@ -3997,14 +4106,14 @@ class AzureDataLakeStoreDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param folder_path: Path to the folder in the Azure Data Lake Store. Type: string (or Expression with resultType string). :type folder_path: object @@ -4012,10 +4121,10 @@ class AzureDataLakeStoreDataset(Dataset): Expression with resultType string). :type file_name: object :param format: The format of the Data Lake Store. - :type format: ~data_factory_management_client.models.DatasetStorageFormat + :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat :param compression: The data compression method used for the item(s) in the Azure Data Lake Store. - :type compression: ~data_factory_management_client.models.DatasetCompression + :type compression: ~azure.mgmt.datafactory.models.DatasetCompression """ _validation = { @@ -4062,11 +4171,11 @@ class AzureDataLakeStoreLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. 
- :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param data_lake_store_uri: Required. Data Lake Store service URI. Type: string (or Expression @@ -4077,7 +4186,7 @@ class AzureDataLakeStoreLinkedService(LinkedService): :type service_principal_id: object :param service_principal_key: The Key of the application used to authenticate against the Azure Data Lake Store account. - :type service_principal_key: ~data_factory_management_client.models.SecretBase + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :param tenant: The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). :type tenant: object @@ -4098,6 +4207,8 @@ class AzureDataLakeStoreLinkedService(LinkedService): encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). :type encrypted_credential: object + :param credential: The credential reference containing authentication information. + :type credential: ~azure.mgmt.datafactory.models.CredentialReference """ _validation = { @@ -4121,6 +4232,7 @@ class AzureDataLakeStoreLinkedService(LinkedService): 'subscription_id': {'key': 'typeProperties.subscriptionId', 'type': 'object'}, 'resource_group_name': {'key': 'typeProperties.resourceGroupName', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'}, } def __init__( @@ -4138,6 +4250,7 @@ def __init__( self.subscription_id = kwargs.get('subscription_id', None) self.resource_group_name = kwargs.get('resource_group_name', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.credential = kwargs.get('credential', None) class AzureDataLakeStoreLocation(DatasetLocation): @@ -4190,6 +4303,9 @@ class AzureDataLakeStoreReadSettings(StoreReadSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). 
:type recursive: object @@ -4235,6 +4351,7 @@ class AzureDataLakeStoreReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -4292,6 +4409,9 @@ class AzureDataLakeStoreSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param copy_behavior: The type of copy behavior for copy sink. :type copy_behavior: object :param enable_adls_single_file_parallel: Single File Parallel. @@ -4310,6 +4430,7 @@ class AzureDataLakeStoreSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, 'enable_adls_single_file_parallel': {'key': 'enableAdlsSingleFileParallel', 'type': 'object'}, } @@ -4343,6 +4464,9 @@ class AzureDataLakeStoreSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). :type recursive: object @@ -4358,6 +4482,7 @@ class AzureDataLakeStoreSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, } @@ -4383,6 +4508,9 @@ class AzureDataLakeStoreWriteSettings(StoreWriteSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param copy_behavior: The type of copy behavior for copy sink. :type copy_behavior: object :param expiry_date_time: Specifies the expiry time of the written files. 
The time is applied to @@ -4399,6 +4527,7 @@ class AzureDataLakeStoreWriteSettings(StoreWriteSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, 'expiry_date_time': {'key': 'expiryDateTime', 'type': 'object'}, } @@ -4423,11 +4552,11 @@ class AzureFileStorageLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param host: Host name of the server. Type: string (or Expression with resultType string). @@ -4436,17 +4565,17 @@ class AzureFileStorageLinkedService(LinkedService): string). :type user_id: object :param password: Password to logon the server. - :type password: ~data_factory_management_client.models.SecretBase + :type password: ~azure.mgmt.datafactory.models.SecretBase :param connection_string: The connection string. It is mutually exclusive with sasUri property. Type: string, SecureString or AzureKeyVaultSecretReference. :type connection_string: object :param account_key: The Azure key vault secret reference of accountKey in connection string. - :type account_key: ~data_factory_management_client.models.AzureKeyVaultSecretReference + :type account_key: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param sas_uri: SAS URI of the Azure File resource. It is mutually exclusive with connectionString property. Type: string, SecureString or AzureKeyVaultSecretReference. :type sas_uri: object :param sas_token: The Azure key vault secret reference of sasToken in sas uri. - :type sas_token: ~data_factory_management_client.models.AzureKeyVaultSecretReference + :type sas_token: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param file_share: The azure file share name. It is required when auth with accountKey/sasToken. Type: string (or Expression with resultType string). :type file_share: object @@ -4550,6 +4679,9 @@ class AzureFileStorageReadSettings(StoreReadSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). 
:type recursive: object @@ -4590,6 +4722,7 @@ class AzureFileStorageReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -4633,6 +4766,9 @@ class AzureFileStorageWriteSettings(StoreWriteSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param copy_behavior: The type of copy behavior for copy sink. :type copy_behavior: object """ @@ -4645,6 +4781,7 @@ class AzureFileStorageWriteSettings(StoreWriteSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, } @@ -4671,16 +4808,16 @@ class AzureFunctionActivity(ExecutionActivity): :param description: Activity description. :type description: str :param depends_on: Activity depends on condition. - :type depends_on: list[~data_factory_management_client.models.ActivityDependency] + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. - :type user_properties: list[~data_factory_management_client.models.UserProperty] + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] :param linked_service_name: Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param policy: Activity policy. - :type policy: ~data_factory_management_client.models.ActivityPolicy + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy :param method: Required. Rest API method for target endpoint. Possible values include: "GET", "POST", "PUT", "DELETE", "OPTIONS", "HEAD", "TRACE". - :type method: str or ~data_factory_management_client.models.AzureFunctionActivityMethod + :type method: str or ~azure.mgmt.datafactory.models.AzureFunctionActivityMethod :param function_name: Required. Name of the Function that the Azure Function Activity will call. Type: string (or Expression with resultType string). :type function_name: object @@ -4738,22 +4875,29 @@ class AzureFunctionLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. 
- :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param function_app_url: Required. The endpoint of the Azure Function App. URL will be in the format https://:code:``.azurewebsites.net. :type function_app_url: object :param function_key: Function or Host key for Azure Function App. - :type function_key: ~data_factory_management_client.models.SecretBase + :type function_key: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). :type encrypted_credential: object + :param credential: The credential reference containing authentication information. + :type credential: ~azure.mgmt.datafactory.models.CredentialReference + :param resource_id: Allowed token audiences for azure function. + :type resource_id: object + :param authentication: Type of authentication (Required to specify MSI) used to connect to + AzureFunction. Type: string (or Expression with resultType string). + :type authentication: object """ _validation = { @@ -4771,6 +4915,9 @@ class AzureFunctionLinkedService(LinkedService): 'function_app_url': {'key': 'typeProperties.functionAppUrl', 'type': 'object'}, 'function_key': {'key': 'typeProperties.functionKey', 'type': 'SecretBase'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'}, + 'resource_id': {'key': 'typeProperties.resourceId', 'type': 'object'}, + 'authentication': {'key': 'typeProperties.authentication', 'type': 'object'}, } def __init__( @@ -4782,6 +4929,9 @@ def __init__( self.function_app_url = kwargs['function_app_url'] self.function_key = kwargs.get('function_key', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.credential = kwargs.get('credential', None) + self.resource_id = kwargs.get('resource_id', None) + self.authentication = kwargs.get('authentication', None) class AzureKeyVaultLinkedService(LinkedService): @@ -4795,16 +4945,18 @@ class AzureKeyVaultLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param base_url: Required. The base URL of the Azure Key Vault. e.g. https://myakv.vault.azure.net Type: string (or Expression with resultType string). :type base_url: object + :param credential: The credential reference containing authentication information. 
+ :type credential: ~azure.mgmt.datafactory.models.CredentialReference """ _validation = { @@ -4820,6 +4972,7 @@ class AzureKeyVaultLinkedService(LinkedService): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'base_url': {'key': 'typeProperties.baseUrl', 'type': 'object'}, + 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'}, } def __init__( @@ -4829,6 +4982,7 @@ def __init__( super(AzureKeyVaultLinkedService, self).__init__(**kwargs) self.type = 'AzureKeyVault' # type: str self.base_url = kwargs['base_url'] + self.credential = kwargs.get('credential', None) class SecretBase(msrest.serialization.Model): @@ -4871,7 +5025,7 @@ class AzureKeyVaultSecretReference(SecretBase): :param type: Required. Type of the secret.Constant filled by server. :type type: str :param store: Required. The Azure Key Vault linked service reference. - :type store: ~data_factory_management_client.models.LinkedServiceReference + :type store: ~azure.mgmt.datafactory.models.LinkedServiceReference :param secret_name: Required. The name of the secret in Azure Key Vault. Type: string (or Expression with resultType string). :type secret_name: object @@ -4915,18 +5069,18 @@ class AzureMariaDbLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param connection_string: An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. :type connection_string: object :param pwd: The Azure key vault secret reference of password in connection string. - :type pwd: ~data_factory_management_client.models.AzureKeyVaultSecretReference + :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -4979,12 +5133,15 @@ class AzureMariaDbSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). 
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -5000,8 +5157,9 @@ class AzureMariaDbSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -5033,14 +5191,14 @@ class AzureMariaDbTableDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The table name. Type: string (or Expression with resultType string). :type table_name: object """ @@ -5087,13 +5245,13 @@ class AzureMlBatchExecutionActivity(ExecutionActivity): :param description: Activity description. :type description: str :param depends_on: Activity depends on condition. - :type depends_on: list[~data_factory_management_client.models.ActivityDependency] + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. - :type user_properties: list[~data_factory_management_client.models.UserProperty] + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] :param linked_service_name: Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param policy: Activity policy. - :type policy: ~data_factory_management_client.models.ActivityPolicy + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy :param global_parameters: Key,Value pairs to be passed to the Azure ML Batch Execution Service endpoint. Keys must match the names of web service parameters defined in the published Azure ML web service. Values will be passed in the GlobalParameters property of the Azure ML batch @@ -5103,14 +5261,12 @@ class AzureMlBatchExecutionActivity(ExecutionActivity): Service Outputs to AzureMLWebServiceFile objects specifying the output Blob locations. 
This information will be passed in the WebServiceOutputs property of the Azure ML batch execution request. - :type web_service_outputs: dict[str, - ~data_factory_management_client.models.AzureMlWebServiceFile] + :type web_service_outputs: dict[str, ~azure.mgmt.datafactory.models.AzureMlWebServiceFile] :param web_service_inputs: Key,Value pairs, mapping the names of Azure ML endpoint's Web Service Inputs to AzureMLWebServiceFile objects specifying the input Blob locations.. This information will be passed in the WebServiceInputs property of the Azure ML batch execution request. - :type web_service_inputs: dict[str, - ~data_factory_management_client.models.AzureMlWebServiceFile] + :type web_service_inputs: dict[str, ~azure.mgmt.datafactory.models.AzureMlWebServiceFile] """ _validation = { @@ -5158,13 +5314,13 @@ class AzureMlExecutePipelineActivity(ExecutionActivity): :param description: Activity description. :type description: str :param depends_on: Activity depends on condition. - :type depends_on: list[~data_factory_management_client.models.ActivityDependency] + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. - :type user_properties: list[~data_factory_management_client.models.UserProperty] + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] :param linked_service_name: Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param policy: Activity policy. - :type policy: ~data_factory_management_client.models.ActivityPolicy + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy :param ml_pipeline_id: ID of the published Azure ML pipeline. Type: string (or Expression with resultType string). :type ml_pipeline_id: object @@ -5249,18 +5405,18 @@ class AzureMlLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param ml_endpoint: Required. The Batch Execution REST URL for an Azure ML Studio Web Service endpoint. Type: string (or Expression with resultType string). :type ml_endpoint: object :param api_key: Required. The API key for accessing the Azure ML model endpoint. - :type api_key: ~data_factory_management_client.models.SecretBase + :type api_key: ~azure.mgmt.datafactory.models.SecretBase :param update_resource_endpoint: The Update Resource REST URL for an Azure ML Studio Web Service endpoint. Type: string (or Expression with resultType string). :type update_resource_endpoint: object @@ -5270,7 +5426,7 @@ class AzureMlLinkedService(LinkedService): :type service_principal_id: object :param service_principal_key: The key of the service principal used to authenticate against the ARM-based updateResourceEndpoint of an Azure ML Studio web service. 
- :type service_principal_key: ~data_factory_management_client.models.SecretBase + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :param tenant: The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). :type tenant: object @@ -5278,6 +5434,9 @@ class AzureMlLinkedService(LinkedService): encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). :type encrypted_credential: object + :param authentication: Type of authentication (Required to specify MSI) used to connect to + AzureML. Type: string (or Expression with resultType string). + :type authentication: object """ _validation = { @@ -5300,6 +5459,7 @@ class AzureMlLinkedService(LinkedService): 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'authentication': {'key': 'typeProperties.authentication', 'type': 'object'}, } def __init__( @@ -5315,6 +5475,7 @@ def __init__( self.service_principal_key = kwargs.get('service_principal_key', None) self.tenant = kwargs.get('tenant', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.authentication = kwargs.get('authentication', None) class AzureMlServiceLinkedService(LinkedService): @@ -5328,11 +5489,11 @@ class AzureMlServiceLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param subscription_id: Required. Azure ML Service workspace subscription ID. Type: string (or @@ -5350,7 +5511,7 @@ class AzureMlServiceLinkedService(LinkedService): :type service_principal_id: object :param service_principal_key: The key of the service principal used to authenticate against the endpoint of a published Azure ML Service pipeline. - :type service_principal_key: ~data_factory_management_client.models.SecretBase + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :param tenant: The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). :type tenant: object @@ -5413,20 +5574,19 @@ class AzureMlUpdateResourceActivity(ExecutionActivity): :param description: Activity description. :type description: str :param depends_on: Activity depends on condition. - :type depends_on: list[~data_factory_management_client.models.ActivityDependency] + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. - :type user_properties: list[~data_factory_management_client.models.UserProperty] + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] :param linked_service_name: Linked service reference. 
- :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param policy: Activity policy. - :type policy: ~data_factory_management_client.models.ActivityPolicy + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy :param trained_model_name: Required. Name of the Trained Model module in the Web Service experiment to be updated. Type: string (or Expression with resultType string). :type trained_model_name: object :param trained_model_linked_service_name: Required. Name of Azure Storage linked service holding the .ilearner file that will be uploaded by the update operation. - :type trained_model_linked_service_name: - ~data_factory_management_client.models.LinkedServiceReference + :type trained_model_linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param trained_model_file_path: Required. The relative file path in trainedModelLinkedService to represent the .ilearner file that will be uploaded by the update operation. Type: string (or Expression with resultType string). @@ -5476,7 +5636,7 @@ class AzureMlWebServiceFile(msrest.serialization.Model): :type file_path: object :param linked_service_name: Required. Reference to an Azure Storage LinkedService, where Azure ML WebService Input/Output file located. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference """ _validation = { @@ -5509,18 +5669,18 @@ class AzureMySqlLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param connection_string: Required. The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. :type connection_string: object :param password: The Azure key vault secret reference of password in connection string. - :type password: ~data_factory_management_client.models.AzureKeyVaultSecretReference + :type password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -5580,6 +5740,9 @@ class AzureMySqlSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param pre_copy_script: A query to execute before starting the copy. Type: string (or Expression with resultType string). 
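disable_metrics_collection recurs on every source and sink touched in this file, including the AzureMySqlSink hunk above. It is a plain pass-through property; a hedged sketch (the pre-copy script is invented):

    from azure.mgmt.datafactory.models import AzureMySqlSink

    # Sketch: opt this sink out of data store metrics collection. The field is
    # typed 'object', so an ADF expression could be passed instead of a bool.
    sink = AzureMySqlSink(
        pre_copy_script='TRUNCATE TABLE staging_orders',  # illustrative
        disable_metrics_collection=True,
    )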
:type pre_copy_script: object @@ -5597,6 +5760,7 @@ class AzureMySqlSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } @@ -5628,12 +5792,15 @@ class AzureMySqlSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: Database query. Type: string (or Expression with resultType string). :type query: object """ @@ -5648,8 +5815,9 @@ class AzureMySqlSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -5681,14 +5849,14 @@ class AzureMySqlTableDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The Azure MySQL database table name. Type: string (or Expression with resultType string). :type table_name: object @@ -5737,18 +5905,18 @@ class AzurePostgreSqlLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. 
- :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param connection_string: An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. :type connection_string: object :param password: The Azure key vault secret reference of password in connection string. - :type password: ~data_factory_management_client.models.AzureKeyVaultSecretReference + :type password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -5807,6 +5975,9 @@ class AzurePostgreSqlSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param pre_copy_script: A query to execute before starting the copy. Type: string (or Expression with resultType string). :type pre_copy_script: object @@ -5824,6 +5995,7 @@ class AzurePostgreSqlSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } @@ -5855,12 +6027,15 @@ class AzurePostgreSqlSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
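Note the recurring retyping of additional_columns from list[AdditionalColumns] to a bare object, as in the AzurePostgreSqlSource hunk above: a plain list of name/value mappings (or a pipeline expression) now serializes as-is. A sketch under that assumption, with invented column names:

    from azure.mgmt.datafactory.models import AzurePostgreSqlSource

    # Sketch: additional_columns as free-form content; each entry still follows
    # the AdditionalColumns shape ({'name': ..., 'value': ...}).
    source = AzurePostgreSqlSource(
        query='SELECT * FROM public.orders',  # illustrative
        additional_columns=[
            {'name': 'ingested_at',
             'value': {'value': '@utcnow()', 'type': 'Expression'}},
        ],
    )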
:type query: object @@ -5876,8 +6051,9 @@ class AzurePostgreSqlSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -5909,14 +6085,14 @@ class AzurePostgreSqlTableDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The table name of the Azure PostgreSQL database which includes both schema and table. Type: string (or Expression with resultType string). :type table_name: object @@ -5984,6 +6160,9 @@ class AzureQueueSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object """ _validation = { @@ -5998,6 +6177,7 @@ class AzureQueueSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, } def __init__( @@ -6027,14 +6207,14 @@ class AzureSearchIndexDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. 
- :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param index_name: Required. The name of the Azure Search Index. Type: string (or Expression with resultType string). :type index_name: object @@ -6093,10 +6273,12 @@ class AzureSearchIndexSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param write_behavior: Specify the write behavior when upserting documents into Azure Search Index. Possible values include: "Merge", "Upload". - :type write_behavior: str or - ~data_factory_management_client.models.AzureSearchIndexWriteBehaviorType + :type write_behavior: str or ~azure.mgmt.datafactory.models.AzureSearchIndexWriteBehaviorType """ _validation = { @@ -6111,6 +6293,7 @@ class AzureSearchIndexSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, } @@ -6134,18 +6317,18 @@ class AzureSearchLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param url: Required. URL for Azure Search service. Type: string (or Expression with resultType string). :type url: object :param key: Admin Key for Azure Search service. - :type key: ~data_factory_management_client.models.SecretBase + :type key: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -6191,24 +6374,24 @@ class AzureSqlDatabaseLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. 
- :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param connection_string: Required. The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. :type connection_string: object :param password: The Azure key vault secret reference of password in connection string. - :type password: ~data_factory_management_client.models.AzureKeyVaultSecretReference + :type password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param service_principal_id: The ID of the service principal used to authenticate against Azure SQL Database. Type: string (or Expression with resultType string). :type service_principal_id: object :param service_principal_key: The key of the service principal used to authenticate against Azure SQL Database. - :type service_principal_key: ~data_factory_management_client.models.SecretBase + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :param tenant: The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). :type tenant: object @@ -6221,8 +6404,9 @@ class AzureSqlDatabaseLinkedService(LinkedService): resultType string). :type encrypted_credential: object :param always_encrypted_settings: Sql always encrypted properties. - :type always_encrypted_settings: - ~data_factory_management_client.models.SqlAlwaysEncryptedProperties + :type always_encrypted_settings: ~azure.mgmt.datafactory.models.SqlAlwaysEncryptedProperties + :param credential: The credential reference containing authentication information. + :type credential: ~azure.mgmt.datafactory.models.CredentialReference """ _validation = { @@ -6245,6 +6429,7 @@ class AzureSqlDatabaseLinkedService(LinkedService): 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, 'always_encrypted_settings': {'key': 'typeProperties.alwaysEncryptedSettings', 'type': 'SqlAlwaysEncryptedProperties'}, + 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'}, } def __init__( @@ -6261,6 +6446,7 @@ def __init__( self.azure_cloud_type = kwargs.get('azure_cloud_type', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) self.always_encrypted_settings = kwargs.get('always_encrypted_settings', None) + self.credential = kwargs.get('credential', None) class AzureSqlDwLinkedService(LinkedService): @@ -6274,24 +6460,24 @@ class AzureSqlDwLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param connection_string: Required. The connection string. 
Type: string, SecureString or AzureKeyVaultSecretReference. Type: string, SecureString or AzureKeyVaultSecretReference. :type connection_string: object :param password: The Azure key vault secret reference of password in connection string. - :type password: ~data_factory_management_client.models.AzureKeyVaultSecretReference + :type password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param service_principal_id: The ID of the service principal used to authenticate against Azure SQL Data Warehouse. Type: string (or Expression with resultType string). :type service_principal_id: object :param service_principal_key: The key of the service principal used to authenticate against Azure SQL Data Warehouse. - :type service_principal_key: ~data_factory_management_client.models.SecretBase + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :param tenant: The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). :type tenant: object @@ -6303,6 +6489,8 @@ class AzureSqlDwLinkedService(LinkedService): encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). :type encrypted_credential: object + :param credential: The credential reference containing authentication information. + :type credential: ~azure.mgmt.datafactory.models.CredentialReference """ _validation = { @@ -6324,6 +6512,7 @@ class AzureSqlDwLinkedService(LinkedService): 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'}, } def __init__( @@ -6339,6 +6528,7 @@ def __init__( self.tenant = kwargs.get('tenant', None) self.azure_cloud_type = kwargs.get('azure_cloud_type', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.credential = kwargs.get('credential', None) class AzureSqlDwTableDataset(Dataset): @@ -6360,14 +6550,14 @@ class AzureSqlDwTableDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: This property will be retired. Please consider using schema + table properties instead. :type table_name: object @@ -6421,24 +6611,24 @@ class AzureSqlMiLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. 
- :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param connection_string: Required. The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. :type connection_string: object :param password: The Azure key vault secret reference of password in connection string. - :type password: ~data_factory_management_client.models.AzureKeyVaultSecretReference + :type password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param service_principal_id: The ID of the service principal used to authenticate against Azure SQL Managed Instance. Type: string (or Expression with resultType string). :type service_principal_id: object :param service_principal_key: The key of the service principal used to authenticate against Azure SQL Managed Instance. - :type service_principal_key: ~data_factory_management_client.models.SecretBase + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :param tenant: The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). :type tenant: object @@ -6451,8 +6641,9 @@ class AzureSqlMiLinkedService(LinkedService): resultType string). :type encrypted_credential: object :param always_encrypted_settings: Sql always encrypted properties. - :type always_encrypted_settings: - ~data_factory_management_client.models.SqlAlwaysEncryptedProperties + :type always_encrypted_settings: ~azure.mgmt.datafactory.models.SqlAlwaysEncryptedProperties + :param credential: The credential reference containing authentication information. + :type credential: ~azure.mgmt.datafactory.models.CredentialReference """ _validation = { @@ -6475,6 +6666,7 @@ class AzureSqlMiLinkedService(LinkedService): 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, 'always_encrypted_settings': {'key': 'typeProperties.alwaysEncryptedSettings', 'type': 'SqlAlwaysEncryptedProperties'}, + 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'}, } def __init__( @@ -6491,6 +6683,7 @@ def __init__( self.azure_cloud_type = kwargs.get('azure_cloud_type', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) self.always_encrypted_settings = kwargs.get('always_encrypted_settings', None) + self.credential = kwargs.get('credential', None) class AzureSqlMiTableDataset(Dataset): @@ -6512,14 +6705,14 @@ class AzureSqlMiTableDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. 
- :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: This property will be retired. Please consider using schema + table properties instead. :type table_name: object @@ -6587,6 +6780,9 @@ class AzureSqlSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or Expression with resultType string). :type sql_writer_stored_procedure_name: object @@ -6598,13 +6794,21 @@ class AzureSqlSink(CopySink): :type pre_copy_script: object :param stored_procedure_parameters: SQL stored procedure parameters. :type stored_procedure_parameters: dict[str, - ~data_factory_management_client.models.StoredProcedureParameter] + ~azure.mgmt.datafactory.models.StoredProcedureParameter] :param stored_procedure_table_type_parameter_name: The stored procedure parameter name of the table type. Type: string (or Expression with resultType string). :type stored_procedure_table_type_parameter_name: object :param table_option: The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string (or Expression with resultType string). :type table_option: object + :param sql_writer_use_table_lock: Whether to use table lock during bulk copy. Type: boolean (or + Expression with resultType boolean). + :type sql_writer_use_table_lock: object + :param write_behavior: Write behavior when copying data into Azure SQL. Type: + SqlWriteBehaviorEnum (or Expression with resultType SqlWriteBehaviorEnum). + :type write_behavior: object + :param upsert_settings: SQL upsert settings. 
+ :type upsert_settings: ~azure.mgmt.datafactory.models.SqlUpsertSettings """ _validation = { @@ -6619,12 +6823,16 @@ class AzureSqlSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, 'table_option': {'key': 'tableOption', 'type': 'object'}, + 'sql_writer_use_table_lock': {'key': 'sqlWriterUseTableLock', 'type': 'object'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, + 'upsert_settings': {'key': 'upsertSettings', 'type': 'SqlUpsertSettings'}, } def __init__( @@ -6639,6 +6847,9 @@ def __init__( self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) self.stored_procedure_table_type_parameter_name = kwargs.get('stored_procedure_table_type_parameter_name', None) self.table_option = kwargs.get('table_option', None) + self.sql_writer_use_table_lock = kwargs.get('sql_writer_use_table_lock', None) + self.write_behavior = kwargs.get('write_behavior', None) + self.upsert_settings = kwargs.get('upsert_settings', None) class AzureSqlSource(TabularSource): @@ -6660,12 +6871,15 @@ class AzureSqlSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). :type sql_reader_query: object :param sql_reader_stored_procedure_name: Name of the stored procedure for a SQL Database @@ -6675,14 +6889,14 @@ class AzureSqlSource(TabularSource): :param stored_procedure_parameters: Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". :type stored_procedure_parameters: dict[str, - ~data_factory_management_client.models.StoredProcedureParameter] + ~azure.mgmt.datafactory.models.StoredProcedureParameter] :param produce_additional_types: Which additional types to produce. :type produce_additional_types: object :param partition_option: The partition mechanism that will be used for Sql read in parallel. 
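The three sink-side additions to AzureSqlSink (sql_writer_use_table_lock, write_behavior, upsert_settings) are designed to compose. A sketch only; the SqlUpsertSettings field names follow the type referenced above, and the key column is invented:

    from azure.mgmt.datafactory.models import AzureSqlSink, SqlUpsertSettings

    # Sketch: upsert into Azure SQL, holding a table lock during the bulk copy.
    sink = AzureSqlSink(
        write_behavior='Upsert',         # a SqlWriteBehaviorEnum value, as a string
        sql_writer_use_table_lock=True,
        upsert_settings=SqlUpsertSettings(
            use_temp_db=True,            # stage rows in tempdb before merging
            keys=['order_id'],           # illustrative key column(s)
        ),
    )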
Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". :type partition_option: object :param partition_settings: The settings that will be leveraged for Sql source partitioning. - :type partition_settings: ~data_factory_management_client.models.SqlPartitionSettings + :type partition_settings: ~azure.mgmt.datafactory.models.SqlPartitionSettings """ _validation = { @@ -6695,8 +6909,9 @@ class AzureSqlSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, @@ -6738,14 +6953,14 @@ class AzureSqlTableDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: This property will be retired. Please consider using schema + table properties instead. :type table_name: object @@ -6799,23 +7014,23 @@ class AzureStorageLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param connection_string: The connection string. It is mutually exclusive with sasUri property. Type: string, SecureString or AzureKeyVaultSecretReference. :type connection_string: object :param account_key: The Azure key vault secret reference of accountKey in connection string. 
- :type account_key: ~data_factory_management_client.models.AzureKeyVaultSecretReference + :type account_key: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param sas_uri: SAS URI of the Azure Storage resource. It is mutually exclusive with connectionString property. Type: string, SecureString or AzureKeyVaultSecretReference. :type sas_uri: object :param sas_token: The Azure key vault secret reference of sasToken in sas uri. - :type sas_token: ~data_factory_management_client.models.AzureKeyVaultSecretReference + :type sas_token: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -6872,14 +7087,14 @@ class AzureTableDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: Required. The table name of the Azure Table storage. Type: string (or Expression with resultType string). :type table_name: object @@ -6938,6 +7153,9 @@ class AzureTableSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param azure_table_default_partition_key_value: Azure Table default partition key value. Type: string (or Expression with resultType string). :type azure_table_default_partition_key_value: object @@ -6964,6 +7182,7 @@ class AzureTableSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'azure_table_default_partition_key_value': {'key': 'azureTableDefaultPartitionKeyValue', 'type': 'object'}, 'azure_table_partition_key_name': {'key': 'azureTablePartitionKeyName', 'type': 'object'}, 'azure_table_row_key_name': {'key': 'azureTableRowKeyName', 'type': 'object'}, @@ -7001,12 +7220,15 @@ class AzureTableSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. 
Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param azure_table_source_query: Azure Table source query. Type: string (or Expression with resultType string). :type azure_table_source_query: object @@ -7025,8 +7247,9 @@ class AzureTableSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'azure_table_source_query': {'key': 'azureTableSourceQuery', 'type': 'object'}, 'azure_table_source_ignore_table_not_found': {'key': 'azureTableSourceIgnoreTableNotFound', 'type': 'object'}, } @@ -7052,23 +7275,23 @@ class AzureTableStorageLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param connection_string: The connection string. It is mutually exclusive with sasUri property. Type: string, SecureString or AzureKeyVaultSecretReference. :type connection_string: object :param account_key: The Azure key vault secret reference of accountKey in connection string. - :type account_key: ~data_factory_management_client.models.AzureKeyVaultSecretReference + :type account_key: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param sas_uri: SAS URI of the Azure Storage resource. It is mutually exclusive with connectionString property. Type: string, SecureString or AzureKeyVaultSecretReference. :type sas_uri: object :param sas_token: The Azure key vault secret reference of sasToken in sas uri. - :type sas_token: ~data_factory_management_client.models.AzureKeyVaultSecretReference + :type sas_token: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
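Before the Binary* types below: the credential property threaded through the linked services above (Azure Function, Key Vault, Azure SQL Database, SQL DW, SQL MI) is the same CredentialReference everywhere. A hedged sketch against AzureSqlDatabaseLinkedService; the connection string and credential name are placeholders:

    from azure.mgmt.datafactory.models import (
        AzureSqlDatabaseLinkedService,
        CredentialReference,
    )

    # Sketch: a secret-free connection string, with authentication delegated to
    # a factory-level credential resource.
    sql_ls = AzureSqlDatabaseLinkedService(
        connection_string='Server=myserver.database.windows.net;Database=mydb;',
        credential=CredentialReference(reference_name='myUserAssignedIdentity'),
    )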
@@ -7125,18 +7348,18 @@ class BinaryDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param location: The location of the Binary storage. - :type location: ~data_factory_management_client.models.DatasetLocation + :type location: ~azure.mgmt.datafactory.models.DatasetLocation :param compression: The data compression method used for the binary dataset. - :type compression: ~data_factory_management_client.models.DatasetCompression + :type compression: ~azure.mgmt.datafactory.models.DatasetCompression """ _validation = { @@ -7216,7 +7439,7 @@ class BinaryReadSettings(FormatReadSettings): :param type: Required. The read setting type.Constant filled by server. :type type: str :param compression_properties: Compression settings. - :type compression_properties: ~data_factory_management_client.models.CompressionReadSettings + :type compression_properties: ~azure.mgmt.datafactory.models.CompressionReadSettings """ _validation = { @@ -7263,8 +7486,11 @@ class BinarySink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param store_settings: Binary store settings. - :type store_settings: ~data_factory_management_client.models.StoreWriteSettings + :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings """ _validation = { @@ -7279,6 +7505,7 @@ class BinarySink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, } @@ -7310,10 +7537,13 @@ class BinarySource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param store_settings: Binary store settings. 
- :type store_settings: ~data_factory_management_client.models.StoreReadSettings + :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings :param format_settings: Binary format settings. - :type format_settings: ~data_factory_management_client.models.BinaryReadSettings + :type format_settings: ~azure.mgmt.datafactory.models.BinaryReadSettings """ _validation = { @@ -7326,6 +7556,7 @@ class BinarySource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, 'format_settings': {'key': 'formatSettings', 'type': 'BinaryReadSettings'}, } @@ -7359,7 +7590,7 @@ class Trigger(msrest.serialization.Model): :type description: str :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". - :vartype runtime_state: str or ~data_factory_management_client.models.TriggerRuntimeState + :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState :param annotations: List of tags that can be used for describing the trigger. :type annotations: list[object] """ @@ -7412,11 +7643,11 @@ class MultiplePipelineTrigger(Trigger): :type description: str :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". - :vartype runtime_state: str or ~data_factory_management_client.models.TriggerRuntimeState + :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState :param annotations: List of tags that can be used for describing the trigger. :type annotations: list[object] :param pipelines: Pipelines that need to be started. - :type pipelines: list[~data_factory_management_client.models.TriggerPipelineReference] + :type pipelines: list[~azure.mgmt.datafactory.models.TriggerPipelineReference] """ _validation = { @@ -7462,11 +7693,11 @@ class BlobEventsTrigger(MultiplePipelineTrigger): :type description: str :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". - :vartype runtime_state: str or ~data_factory_management_client.models.TriggerRuntimeState + :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState :param annotations: List of tags that can be used for describing the trigger. :type annotations: list[object] :param pipelines: Pipelines that need to be started. - :type pipelines: list[~data_factory_management_client.models.TriggerPipelineReference] + :type pipelines: list[~azure.mgmt.datafactory.models.TriggerPipelineReference] :param blob_path_begins_with: The blob path must begin with the pattern provided for trigger to fire. For example, '/records/blobs/december/' will only fire the trigger for blobs in the december folder under the records container. At least one of these must be provided: @@ -7479,7 +7710,7 @@ class BlobEventsTrigger(MultiplePipelineTrigger): :param ignore_empty_blobs: If set to true, blobs with zero bytes will be ignored. :type ignore_empty_blobs: bool :param events: Required. The type of events that cause this trigger to fire. 
- :type events: list[str or ~data_factory_management_client.models.BlobEventTypes] + :type events: list[str or ~azure.mgmt.datafactory.models.BlobEventTypes] :param scope: Required. The ARM resource ID of the Storage Account. :type scope: str """ @@ -7543,6 +7774,9 @@ class BlobSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param blob_writer_overwrite_files: Blob writer overwrite files. Type: boolean (or Expression with resultType boolean). :type blob_writer_overwrite_files: object @@ -7554,6 +7788,9 @@ class BlobSink(CopySink): :type blob_writer_add_header: object :param copy_behavior: The type of copy behavior for copy sink. :type copy_behavior: object + :param metadata: Specify the custom metadata to be added to sink data. Type: array of objects + (or Expression with resultType array of objects). + :type metadata: list[~azure.mgmt.datafactory.models.MetadataItem] """ _validation = { @@ -7568,10 +7805,12 @@ class BlobSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'blob_writer_overwrite_files': {'key': 'blobWriterOverwriteFiles', 'type': 'object'}, 'blob_writer_date_time_format': {'key': 'blobWriterDateTimeFormat', 'type': 'object'}, 'blob_writer_add_header': {'key': 'blobWriterAddHeader', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + 'metadata': {'key': 'metadata', 'type': '[MetadataItem]'}, } def __init__( @@ -7584,6 +7823,7 @@ def __init__( self.blob_writer_date_time_format = kwargs.get('blob_writer_date_time_format', None) self.blob_writer_add_header = kwargs.get('blob_writer_add_header', None) self.copy_behavior = kwargs.get('copy_behavior', None) + self.metadata = kwargs.get('metadata', None) class BlobSource(CopySource): @@ -7605,6 +7845,9 @@ class BlobSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param treat_empty_as_null: Treat empty as null. Type: boolean (or Expression with resultType boolean). 
:type treat_empty_as_null: object @@ -7626,6 +7869,7 @@ class BlobSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'}, 'skip_header_line_count': {'key': 'skipHeaderLineCount', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, @@ -7658,18 +7902,18 @@ class BlobTrigger(MultiplePipelineTrigger): :type description: str :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". - :vartype runtime_state: str or ~data_factory_management_client.models.TriggerRuntimeState + :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState :param annotations: List of tags that can be used for describing the trigger. :type annotations: list[object] :param pipelines: Pipelines that need to be started. - :type pipelines: list[~data_factory_management_client.models.TriggerPipelineReference] + :type pipelines: list[~azure.mgmt.datafactory.models.TriggerPipelineReference] :param folder_path: Required. The path of the container/folder that will trigger the pipeline. :type folder_path: str :param max_concurrency: Required. The max number of parallel files to handle when it is triggered. :type max_concurrency: int :param linked_service: Required. The Azure Storage linked service reference. - :type linked_service: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference """ _validation = { @@ -7714,11 +7958,11 @@ class CassandraLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param host: Required. Host name for connection. Type: string (or Expression with resultType @@ -7734,7 +7978,7 @@ class CassandraLinkedService(LinkedService): string). :type username: object :param password: Password for authentication. - :type password: ~data_factory_management_client.models.SecretBase + :type password: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -7794,12 +8038,15 @@ class CassandraSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). 
:type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: Database query. Should be a SQL-92 query expression or Cassandra Query Language (CQL) command. Type: string (or Expression with resultType string). :type query: object @@ -7810,7 +8057,7 @@ class CassandraSource(TabularSource): Possible values include: "ALL", "EACH_QUORUM", "QUORUM", "LOCAL_QUORUM", "ONE", "TWO", "THREE", "LOCAL_ONE", "SERIAL", "LOCAL_SERIAL". :type consistency_level: str or - ~data_factory_management_client.models.CassandraSourceReadConsistencyLevels + ~azure.mgmt.datafactory.models.CassandraSourceReadConsistencyLevels """ _validation = { @@ -7823,8 +8070,9 @@ class CassandraSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'consistency_level': {'key': 'consistencyLevel', 'type': 'str'}, } @@ -7858,14 +8106,14 @@ class CassandraTableDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The table name of the Cassandra database. Type: string (or Expression with resultType string). :type table_name: object @@ -7919,14 +8167,14 @@ class ChainingTrigger(Trigger): :type description: str :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". 
- :vartype runtime_state: str or ~data_factory_management_client.models.TriggerRuntimeState + :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState :param annotations: List of tags that can be used for describing the trigger. :type annotations: list[object] :param pipeline: Required. Pipeline for which runs are created when all upstream pipelines complete successfully. - :type pipeline: ~data_factory_management_client.models.TriggerPipelineReference + :type pipeline: ~azure.mgmt.datafactory.models.TriggerPipelineReference :param depends_on: Required. Upstream Pipelines. - :type depends_on: list[~data_factory_management_client.models.PipelineReference] + :type depends_on: list[~azure.mgmt.datafactory.models.PipelineReference] :param run_dimension: Required. Run Dimension property that needs to be emitted by upstream pipelines. :type run_dimension: str @@ -7974,7 +8222,7 @@ class CloudError(msrest.serialization.Model): :param target: Property name/path in request associated with error. :type target: str :param details: Array with additional error details. - :type details: list[~data_factory_management_client.models.CloudError] + :type details: list[~azure.mgmt.datafactory.models.CloudError] """ _validation = { @@ -8012,7 +8260,7 @@ class CmdkeySetup(CustomSetupBase): :param user_name: Required. The user name of data source access. :type user_name: object :param password: Required. The password of data source access. - :type password: ~data_factory_management_client.models.SecretBase + :type password: ~azure.mgmt.datafactory.models.SecretBase """ _validation = { @@ -8079,14 +8327,14 @@ class CommonDataServiceForAppsEntityDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param entity_name: The logical name of the entity. Type: string (or Expression with resultType string). :type entity_name: object @@ -8130,18 +8378,18 @@ class CommonDataServiceForAppsLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. 
:type annotations: list[object] :param deployment_type: Required. The deployment type of the Common Data Service for Apps instance. 'Online' for Common Data Service for Apps Online and 'OnPremisesWithIfd' for Common Data Service for Apps on-premises with Ifd. Type: string (or Expression with resultType - string). Possible values include: "Online", "OnPremisesWithIfd". - :type deployment_type: str or ~data_factory_management_client.models.DynamicsDeploymentType + string). + :type deployment_type: object :param host_name: The host name of the on-premises Common Data Service for Apps server. The property is required for on-prem and not allowed for online. Type: string (or Expression with resultType string). @@ -8162,30 +8410,26 @@ class CommonDataServiceForAppsLinkedService(LinkedService): :param authentication_type: Required. The authentication type to connect to Common Data Service for Apps server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario. 'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: string (or - Expression with resultType string). Possible values include: "Office365", "Ifd", - "AADServicePrincipal". - :type authentication_type: str or - ~data_factory_management_client.models.DynamicsAuthenticationType + Expression with resultType string). + :type authentication_type: object :param username: User name to access the Common Data Service for Apps instance. Type: string (or Expression with resultType string). :type username: object :param password: Password to access the Common Data Service for Apps instance. - :type password: ~data_factory_management_client.models.SecretBase + :type password: ~azure.mgmt.datafactory.models.SecretBase :param service_principal_id: The client ID of the application in Azure Active Directory used for Server-To-Server authentication. Type: string (or Expression with resultType string). :type service_principal_id: object :param service_principal_credential_type: The service principal credential type to use in Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' - for certificate. Type: string (or Expression with resultType string). Possible values include: - "ServicePrincipalKey", "ServicePrincipalCert". - :type service_principal_credential_type: str or - ~data_factory_management_client.models.DynamicsServicePrincipalCredentialType + for certificate. Type: string (or Expression with resultType string). + :type service_principal_credential_type: object :param service_principal_credential: The credential of the service principal object in Azure Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only be AzureKeyVaultSecretReference. - :type service_principal_credential: ~data_factory_management_client.models.SecretBase + :type service_principal_credential: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
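Relaxing deploymentType, authenticationType and servicePrincipalCredentialType from enum-backed strings to object (see the attribute-map hunk below) means these properties can be parameterized per run instead of being fixed at authoring time. A hedged sketch under that assumption, again using the azure.mgmt.datafactory.models import path from the docstrings, with placeholder identity values:

from azure.mgmt.datafactory.models import (
    CommonDataServiceForAppsLinkedService,
    SecureString,
)

ls = CommonDataServiceForAppsLinkedService(
    # Plain strings remain valid ...
    deployment_type="Online",
    authentication_type="Office365",
    # ... but an expression object is now equally legal, e.g.:
    # deployment_type={"type": "Expression",
    #                  "value": "@linkedService().deployType"},
    username="integration@contoso.com",       # placeholder
    password=SecureString(value="<secret>"),  # placeholder; never hard-code real secrets
)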
@@ -8205,16 +8449,16 @@ class CommonDataServiceForAppsLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'}, + 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'object'}, 'host_name': {'key': 'typeProperties.hostName', 'type': 'object'}, 'port': {'key': 'typeProperties.port', 'type': 'object'}, 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'}, 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, 'username': {'key': 'typeProperties.username', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'str'}, + 'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'object'}, 'service_principal_credential': {'key': 'typeProperties.servicePrincipalCredential', 'type': 'SecretBase'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -8264,9 +8508,12 @@ class CommonDataServiceForAppsSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param write_behavior: Required. The write behavior for the operation. Possible values include: "Upsert". - :type write_behavior: str or ~data_factory_management_client.models.DynamicsSinkWriteBehavior + :type write_behavior: str or ~azure.mgmt.datafactory.models.DynamicsSinkWriteBehavior :param ignore_null_values: The flag indicating whether to ignore null values from input dataset (except key fields) during write operation. Default is false. Type: boolean (or Expression with resultType boolean). @@ -8289,6 +8536,7 @@ class CommonDataServiceForAppsSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, 'alternate_key_name': {'key': 'alternateKeyName', 'type': 'object'}, @@ -8324,12 +8572,15 @@ class CommonDataServiceForAppsSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). 
+ :type disable_metrics_collection: object :param query: FetchXML is a proprietary query language that is used in Microsoft Common Data Service for Apps (online & on-premises). Type: string (or Expression with resultType string). :type query: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object """ _validation = { @@ -8342,8 +8593,9 @@ class CommonDataServiceForAppsSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -8366,7 +8618,7 @@ class ComponentSetup(CustomSetupBase): :param component_name: Required. The name of the 3rd party component. :type component_name: str :param license_key: The license key to activate the component. - :type license_key: ~data_factory_management_client.models.SecretBase + :type license_key: ~azure.mgmt.datafactory.models.SecretBase """ _validation = { @@ -8438,11 +8690,11 @@ class ConcurLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param connection_properties: Properties used to connect to Concur. It is mutually exclusive @@ -8454,7 +8706,7 @@ class ConcurLinkedService(LinkedService): :type username: object :param password: The password corresponding to the user name that you provided in the username field. - :type password: ~data_factory_management_client.models.SecretBase + :type password: ~azure.mgmt.datafactory.models.SecretBase :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. :type use_encrypted_endpoints: object @@ -8529,14 +8781,14 @@ class ConcurObjectDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. 
- :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The table name. Type: string (or Expression with resultType string). :type table_name: object """ @@ -8587,12 +8839,15 @@ class ConcurSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -8608,8 +8863,9 @@ class ConcurSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -8672,9 +8928,9 @@ class ControlActivity(Activity): :param description: Activity description. :type description: str :param depends_on: Activity depends on condition. - :type depends_on: list[~data_factory_management_client.models.ActivityDependency] + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. - :type user_properties: list[~data_factory_management_client.models.UserProperty] + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] """ _validation = { @@ -8714,28 +8970,28 @@ class CopyActivity(ExecutionActivity): :param description: Activity description. :type description: str :param depends_on: Activity depends on condition. - :type depends_on: list[~data_factory_management_client.models.ActivityDependency] + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. 
- :type user_properties: list[~data_factory_management_client.models.UserProperty] + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] :param linked_service_name: Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param policy: Activity policy. - :type policy: ~data_factory_management_client.models.ActivityPolicy + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy :param inputs: List of inputs for the activity. - :type inputs: list[~data_factory_management_client.models.DatasetReference] + :type inputs: list[~azure.mgmt.datafactory.models.DatasetReference] :param outputs: List of outputs for the activity. - :type outputs: list[~data_factory_management_client.models.DatasetReference] + :type outputs: list[~azure.mgmt.datafactory.models.DatasetReference] :param source: Required. Copy activity source. - :type source: ~data_factory_management_client.models.CopySource + :type source: ~azure.mgmt.datafactory.models.CopySource :param sink: Required. Copy activity sink. - :type sink: ~data_factory_management_client.models.CopySink + :type sink: ~azure.mgmt.datafactory.models.CopySink :param translator: Copy activity translator. If not specified, tabular translator is used. :type translator: object :param enable_staging: Specifies whether to copy data via an interim staging. Default value is false. Type: boolean (or Expression with resultType boolean). :type enable_staging: object :param staging_settings: Specifies interim staging settings when EnableStaging is true. - :type staging_settings: ~data_factory_management_client.models.StagingSettings + :type staging_settings: ~azure.mgmt.datafactory.models.StagingSettings :param parallel_copies: Maximum number of concurrent sessions opened on the source or sink to avoid overloading the data store. Type: integer (or Expression with resultType integer), minimum: 0. @@ -8749,12 +9005,12 @@ class CopyActivity(ExecutionActivity): :param redirect_incompatible_row_settings: Redirect incompatible row settings when EnableSkipIncompatibleRow is true. :type redirect_incompatible_row_settings: - ~data_factory_management_client.models.RedirectIncompatibleRowSettings + ~azure.mgmt.datafactory.models.RedirectIncompatibleRowSettings :param log_storage_settings: (Deprecated. Please use LogSettings) Log storage settings customer need to provide when enabling session log. - :type log_storage_settings: ~data_factory_management_client.models.LogStorageSettings + :type log_storage_settings: ~azure.mgmt.datafactory.models.LogStorageSettings :param log_settings: Log settings customer needs provide when enabling log. - :type log_settings: ~data_factory_management_client.models.LogSettings + :type log_settings: ~azure.mgmt.datafactory.models.LogSettings :param preserve_rules: Preserve Rules. :type preserve_rules: list[object] :param preserve: Preserve rules. @@ -8763,7 +9019,7 @@ class CopyActivity(ExecutionActivity): (or Expression with resultType boolean). :type validate_data_consistency: object :param skip_error_file: Specify the fault tolerance for data consistency. - :type skip_error_file: ~data_factory_management_client.models.SkipErrorFile + :type skip_error_file: ~azure.mgmt.datafactory.models.SkipErrorFile """ _validation = { @@ -8899,11 +9155,11 @@ class CosmosDbLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. 
:type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param connection_string: The connection string. Type: string, SecureString or @@ -8916,7 +9172,7 @@ class CosmosDbLinkedService(LinkedService): :type database: object :param account_key: The account key of the Azure CosmosDB account. Type: SecureString or AzureKeyVaultSecretReference. - :type account_key: ~data_factory_management_client.models.SecretBase + :type account_key: ~azure.mgmt.datafactory.models.SecretBase :param service_principal_id: The client ID of the application in Azure Active Directory used for Server-To-Server authentication. Type: string (or Expression with resultType string). :type service_principal_id: object @@ -8925,13 +9181,13 @@ class CosmosDbLinkedService(LinkedService): for certificate. Type: string (or Expression with resultType string). Possible values include: "ServicePrincipalKey", "ServicePrincipalCert". :type service_principal_credential_type: str or - ~data_factory_management_client.models.CosmosDbServicePrincipalCredentialType + ~azure.mgmt.datafactory.models.CosmosDbServicePrincipalCredentialType :param service_principal_credential: The credential of the service principal object in Azure Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only be AzureKeyVaultSecretReference. - :type service_principal_credential: ~data_factory_management_client.models.SecretBase + :type service_principal_credential: ~azure.mgmt.datafactory.models.SecretBase :param tenant: The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). :type tenant: object @@ -8941,7 +9197,7 @@ class CosmosDbLinkedService(LinkedService): :type azure_cloud_type: object :param connection_mode: The connection mode used to access CosmosDB account. Type: string (or Expression with resultType string). Possible values include: "Gateway", "Direct". - :type connection_mode: str or ~data_factory_management_client.models.CosmosDbConnectionMode + :type connection_mode: str or ~azure.mgmt.datafactory.models.CosmosDbConnectionMode :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -9010,14 +9266,14 @@ class CosmosDbMongoDbApiCollectionDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. 
- :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param collection: Required. The collection name of the CosmosDB (MongoDB API) database. Type: string (or Expression with resultType string). :type collection: object @@ -9062,13 +9318,16 @@ class CosmosDbMongoDbApiLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] + :param is_server_version_above32: Whether the CosmosDB (MongoDB API) server version is higher + than 3.2. The default value is false. Type: boolean (or Expression with resultType boolean). + :type is_server_version_above32: object :param connection_string: Required. The CosmosDB (MongoDB API) connection string. Type: string, SecureString or AzureKeyVaultSecretReference. Type: string, SecureString or AzureKeyVaultSecretReference. @@ -9091,6 +9350,7 @@ class CosmosDbMongoDbApiLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'is_server_version_above32': {'key': 'typeProperties.isServerVersionAbove32', 'type': 'object'}, 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, 'database': {'key': 'typeProperties.database', 'type': 'object'}, } @@ -9101,6 +9361,7 @@ def __init__( ): super(CosmosDbMongoDbApiLinkedService, self).__init__(**kwargs) self.type = 'CosmosDbMongoDbApi' # type: str + self.is_server_version_above32 = kwargs.get('is_server_version_above32', None) self.connection_string = kwargs['connection_string'] self.database = kwargs['database'] @@ -9130,6 +9391,9 @@ class CosmosDbMongoDbApiSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param write_behavior: Specifies whether the document with same key to be overwritten (upsert) rather than throw exception (insert). The default value is "insert". Type: string (or Expression with resultType string). Type: string (or Expression with resultType string). 
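The isServerVersionAbove32 flag added to CosmosDbMongoDbApiLinkedService, like the disableMetricsCollection flag threaded through every source and sink in this diff, is a plain optional kwarg on the generated models. A short sketch with placeholder connection details, illustrative only and again assuming the azure.mgmt.datafactory.models import path:

from azure.mgmt.datafactory.models import (
    CosmosDbMongoDbApiLinkedService,
    CosmosDbMongoDbApiSink,
    SecureString,
)

mongo_ls = CosmosDbMongoDbApiLinkedService(
    is_server_version_above32=True,  # new flag; the service default is false
    connection_string=SecureString(
        value="mongodb://<account>:<key>@<host>:10255/?ssl=true"  # placeholder
    ),
    database="telemetry",            # placeholder database name
)

sink = CosmosDbMongoDbApiSink(
    write_behavior="upsert",          # the service default is "insert"
    disable_metrics_collection=True,  # opt out of data store metrics for this sink
)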
@@ -9148,6 +9412,7 @@ class CosmosDbMongoDbApiSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, } @@ -9179,12 +9444,15 @@ class CosmosDbMongoDbApiSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param filter: Specifies selection filter using query operators. To return all documents in a collection, omit this parameter or pass an empty document ({}). Type: string (or Expression with resultType string). :type filter: object :param cursor_methods: Cursor methods for Mongodb query. - :type cursor_methods: ~data_factory_management_client.models.MongoDbCursorMethodsProperties + :type cursor_methods: ~azure.mgmt.datafactory.models.MongoDbCursorMethodsProperties :param batch_size: Specifies the number of documents to return in each batch of the response from MongoDB instance. In most cases, modifying the batch size will not affect the user or the application. This property's main purpose is to avoid hit the limitation of response size. @@ -9194,8 +9462,8 @@ class CosmosDbMongoDbApiSource(CopySource): pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object """ _validation = { @@ -9208,11 +9476,12 @@ class CosmosDbMongoDbApiSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'filter': {'key': 'filter', 'type': 'object'}, 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'}, 'batch_size': {'key': 'batchSize', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -9247,14 +9516,14 @@ class CosmosDbSqlApiCollectionDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. 
- :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param collection_name: Required. CosmosDB (SQL API) collection name. Type: string (or Expression with resultType string). :type collection_name: object @@ -9313,6 +9582,9 @@ class CosmosDbSqlApiSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param write_behavior: Describes how to write data to Azure Cosmos DB. Type: string (or Expression with resultType string). Allowed values: insert and upsert. :type write_behavior: object @@ -9330,6 +9602,7 @@ class CosmosDbSqlApiSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, } @@ -9361,6 +9634,9 @@ class CosmosDbSqlApiSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query: SQL API query. Type: string (or Expression with resultType string). :type query: object :param page_size: Page size of the result. Type: integer (or Expression with resultType @@ -9373,8 +9649,8 @@ class CosmosDbSqlApiSource(CopySource): Expression with resultType boolean). :type detect_datetime: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
+ :type additional_columns: object """ _validation = { @@ -9387,11 +9663,12 @@ class CosmosDbSqlApiSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'page_size': {'key': 'pageSize', 'type': 'object'}, 'preferred_regions': {'key': 'preferredRegions', 'type': 'object'}, 'detect_datetime': {'key': 'detectDatetime', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -9418,18 +9695,18 @@ class CouchbaseLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param connection_string: An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. :type connection_string: object :param cred_string: The Azure key vault secret reference of credString in connection string. - :type cred_string: ~data_factory_management_client.models.AzureKeyVaultSecretReference + :type cred_string: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -9482,12 +9759,15 @@ class CouchbaseSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
:type query: object @@ -9503,8 +9783,9 @@ class CouchbaseSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -9536,14 +9817,14 @@ class CouchbaseTableDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The table name. Type: string (or Expression with resultType string). :type table_name: object """ @@ -9587,8 +9868,7 @@ class CreateDataFlowDebugSessionRequest(msrest.serialization.Model): :param time_to_live: Time to live setting of the cluster in minutes. :type time_to_live: int :param integration_runtime: Set to use integration runtime setting for data flow debug session. - :type integration_runtime: - ~data_factory_management_client.models.IntegrationRuntimeDebugResource + :type integration_runtime: ~azure.mgmt.datafactory.models.IntegrationRuntimeDebugResource """ _attribute_map = { @@ -9691,6 +9971,172 @@ def __init__( self.run_id = kwargs['run_id'] +class Credential(msrest.serialization.Model): + """The Azure Data Factory nested object which contains the information and credential which can be used to connect with related store or compute resource. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: ManagedIdentityCredential, ServicePrincipalCredential. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of credential.Constant filled by server. + :type type: str + :param description: Credential description. + :type description: str + :param annotations: List of tags that can be used for describing the Credential. 
+ :type annotations: list[object] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + } + + _subtype_map = { + 'type': {'ManagedIdentity': 'ManagedIdentityCredential', 'ServicePrincipal': 'ServicePrincipalCredential'} + } + + def __init__( + self, + **kwargs + ): + super(Credential, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.type = 'Credential' # type: str + self.description = kwargs.get('description', None) + self.annotations = kwargs.get('annotations', None) + + +class CredentialReference(msrest.serialization.Model): + """Credential reference type. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :ivar type: Required. Credential reference type. Default value: "CredentialReference". + :vartype type: str + :param reference_name: Required. Reference credential name. + :type reference_name: str + """ + + _validation = { + 'type': {'required': True, 'constant': True}, + 'reference_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + } + + type = "CredentialReference" + + def __init__( + self, + **kwargs + ): + super(CredentialReference, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.reference_name = kwargs['reference_name'] + + +class SubResource(msrest.serialization.Model): + """Azure Data Factory nested resource, which belongs to a factory. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :ivar etag: Etag identifies change in the resource. + :vartype etag: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(SubResource, self).__init__(**kwargs) + self.id = None + self.name = None + self.type = None + self.etag = None + + +class CredentialResource(SubResource): + """Credential resource type. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :ivar etag: Etag identifies change in the resource. + :vartype etag: str + :param properties: Required. Properties of credentials. 
+ :type properties: ~azure.mgmt.datafactory.models.Credential + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'Credential'}, + } + + def __init__( + self, + **kwargs + ): + super(CredentialResource, self).__init__(**kwargs) + self.properties = kwargs['properties'] + + class CustomActivity(ExecutionActivity): """Custom activity type. @@ -9706,23 +10152,23 @@ class CustomActivity(ExecutionActivity): :param description: Activity description. :type description: str :param depends_on: Activity depends on condition. - :type depends_on: list[~data_factory_management_client.models.ActivityDependency] + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. - :type user_properties: list[~data_factory_management_client.models.UserProperty] + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] :param linked_service_name: Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param policy: Activity policy. - :type policy: ~data_factory_management_client.models.ActivityPolicy + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy :param command: Required. Command for custom activity Type: string (or Expression with resultType string). :type command: object :param resource_linked_service: Resource linked service reference. - :type resource_linked_service: ~data_factory_management_client.models.LinkedServiceReference + :type resource_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference :param folder_path: Folder path for resource files Type: string (or Expression with resultType string). :type folder_path: object :param reference_objects: Reference objects. - :type reference_objects: ~data_factory_management_client.models.CustomActivityReferenceObject + :type reference_objects: ~azure.mgmt.datafactory.models.CustomActivityReferenceObject :param extended_properties: User defined property bag. There is no restriction on the keys or values that can be used. The user specified custom activity has the full responsibility to consume and interpret the content defined. @@ -9778,9 +10224,9 @@ class CustomActivityReferenceObject(msrest.serialization.Model): """Reference objects for custom activity. :param linked_services: Linked service references. - :type linked_services: list[~data_factory_management_client.models.LinkedServiceReference] + :type linked_services: list[~azure.mgmt.datafactory.models.LinkedServiceReference] :param datasets: Dataset references. - :type datasets: list[~data_factory_management_client.models.DatasetReference] + :type datasets: list[~azure.mgmt.datafactory.models.DatasetReference] """ _attribute_map = { @@ -9816,14 +10262,14 @@ class CustomDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. 
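`CredentialResource` inherits the four read-only `SubResource` fields and adds a required `properties` payload; the generated `__init__` reads `kwargs['properties']` directly, so omitting it raises `KeyError` at construction time rather than at validation time. A small usage sketch under the same import assumption as above (keyword-only construction of `ManagedIdentityCredential` is assumed):

from azure.mgmt.datafactory import models

res = models.CredentialResource(
    properties=models.ManagedIdentityCredential(description="factory identity"),
)
print(res.id, res.etag)  # both None locally; populated by the service
body = res.serialize()   # read-only keys (id, name, type, etag) are omitted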
- :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type_properties: Custom dataset properties. :type type_properties: object """ @@ -9866,11 +10312,11 @@ class CustomDataSourceLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param type_properties: Required. Custom linked service properties. @@ -9917,11 +10363,11 @@ class CustomEventsTrigger(MultiplePipelineTrigger): :type description: str :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". - :vartype runtime_state: str or ~data_factory_management_client.models.TriggerRuntimeState + :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState :param annotations: List of tags that can be used for describing the trigger. :type annotations: list[object] :param pipelines: Pipelines that need to be started. - :type pipelines: list[~data_factory_management_client.models.TriggerPipelineReference] + :type pipelines: list[~azure.mgmt.datafactory.models.TriggerPipelineReference] :param subject_begins_with: The event subject must begin with the pattern provided for trigger to fire. At least one of these must be provided: subjectBeginsWith, subjectEndsWith. :type subject_begins_with: str @@ -9981,13 +10427,13 @@ class DatabricksNotebookActivity(ExecutionActivity): :param description: Activity description. :type description: str :param depends_on: Activity depends on condition. - :type depends_on: list[~data_factory_management_client.models.ActivityDependency] + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. - :type user_properties: list[~data_factory_management_client.models.UserProperty] + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] :param linked_service_name: Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param policy: Activity policy. 
- :type policy: ~data_factory_management_client.models.ActivityPolicy + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy :param notebook_path: Required. The absolute path of the notebook to be run in the Databricks Workspace. This path must begin with a slash. Type: string (or Expression with resultType string). @@ -10045,13 +10491,13 @@ class DatabricksSparkJarActivity(ExecutionActivity): :param description: Activity description. :type description: str :param depends_on: Activity depends on condition. - :type depends_on: list[~data_factory_management_client.models.ActivityDependency] + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. - :type user_properties: list[~data_factory_management_client.models.UserProperty] + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] :param linked_service_name: Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param policy: Activity policy. - :type policy: ~data_factory_management_client.models.ActivityPolicy + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy :param main_class_name: Required. The full name of the class containing the main method to be executed. This class must be contained in a JAR provided as a library. Type: string (or Expression with resultType string). @@ -10108,13 +10554,13 @@ class DatabricksSparkPythonActivity(ExecutionActivity): :param description: Activity description. :type description: str :param depends_on: Activity depends on condition. - :type depends_on: list[~data_factory_management_client.models.ActivityDependency] + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. - :type user_properties: list[~data_factory_management_client.models.UserProperty] + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] :param linked_service_name: Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param policy: Activity policy. - :type policy: ~data_factory_management_client.models.ActivityPolicy + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy :param python_file: Required. The URI of the Python file to be executed. DBFS paths are supported. Type: string (or Expression with resultType string). :type python_file: object @@ -10161,7 +10607,9 @@ class DataFlow(msrest.serialization.Model): You probably want to use the sub-classes and not this class directly. Known sub-classes are: MappingDataFlow. - :param type: Type of data flow.Constant filled by server. + All required parameters must be populated in order to send to Azure. + + :param type: Required. Type of data flow.Constant filled by server. :type type: str :param description: The description of the data flow. :type description: str @@ -10169,9 +10617,13 @@ class DataFlow(msrest.serialization.Model): :type annotations: list[object] :param folder: The folder that this data flow is in. If not specified, Data flow will appear at the root level. 
- :type folder: ~data_factory_management_client.models.DataFlowFolder + :type folder: ~azure.mgmt.datafactory.models.DataFlowFolder """ + _validation = { + 'type': {'required': True}, + } + _attribute_map = { 'type': {'key': 'type', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, @@ -10238,9 +10690,9 @@ class DataFlowDebugCommandRequest(msrest.serialization.Model): :type session_id: str :param command: The command type. Possible values include: "executePreviewQuery", "executeStatisticsQuery", "executeExpressionQuery". - :type command: str or ~data_factory_management_client.models.DataFlowDebugCommandType + :type command: str or ~azure.mgmt.datafactory.models.DataFlowDebugCommandType :param command_payload: The command payload object. - :type command_payload: ~data_factory_management_client.models.DataFlowDebugCommandPayload + :type command_payload: ~azure.mgmt.datafactory.models.DataFlowDebugCommandPayload """ _attribute_map = { @@ -10291,15 +10743,15 @@ class DataFlowDebugPackage(msrest.serialization.Model): :param session_id: The ID of data flow debug session. :type session_id: str :param data_flow: Data flow instance. - :type data_flow: ~data_factory_management_client.models.DataFlowDebugResource + :type data_flow: ~azure.mgmt.datafactory.models.DataFlowDebugResource :param datasets: List of datasets. - :type datasets: list[~data_factory_management_client.models.DatasetDebugResource] + :type datasets: list[~azure.mgmt.datafactory.models.DatasetDebugResource] :param linked_services: List of linked services. - :type linked_services: list[~data_factory_management_client.models.LinkedServiceDebugResource] + :type linked_services: list[~azure.mgmt.datafactory.models.LinkedServiceDebugResource] :param staging: Staging info for debug session. - :type staging: ~data_factory_management_client.models.DataFlowStagingInfo + :type staging: ~azure.mgmt.datafactory.models.DataFlowStagingInfo :param debug_settings: Data flow debug settings. - :type debug_settings: ~data_factory_management_client.models.DataFlowDebugPackageDebugSettings + :type debug_settings: ~azure.mgmt.datafactory.models.DataFlowDebugPackageDebugSettings """ _attribute_map = { @@ -10330,7 +10782,7 @@ class DataFlowDebugPackageDebugSettings(msrest.serialization.Model): """Data flow debug settings. :param source_settings: Source setting for data flow debug. - :type source_settings: list[~data_factory_management_client.models.DataFlowSourceSetting] + :type source_settings: list[~azure.mgmt.datafactory.models.DataFlowSourceSetting] :param parameters: Data flow parameters. :type parameters: dict[str, object] :param dataset_parameters: Parameters for dataset. @@ -10380,7 +10832,7 @@ class DataFlowDebugResource(SubResourceDebugResource): :param name: The resource name. :type name: str :param properties: Required. Data flow properties. - :type properties: ~data_factory_management_client.models.DataFlow + :type properties: ~azure.mgmt.datafactory.models.DataFlow """ _validation = { @@ -10481,7 +10933,7 @@ class DataFlowListResponse(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. :param value: Required. List of data flows. - :type value: list[~data_factory_management_client.models.DataFlowResource] + :type value: list[~azure.mgmt.datafactory.models.DataFlowResource] :param next_link: The link to the next page of results, if any remaining results exist. 
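The debug-session models compose in the obvious way: a `DataFlowDebugPackage` wraps a `DataFlowDebugResource`, which in turn requires a concrete `DataFlow` subclass such as `MappingDataFlow`, the only known subtype listed above. A hedged sketch, with the session id left as a placeholder for the value returned when the debug session is created:

from azure.mgmt.datafactory import models

pkg = models.DataFlowDebugPackage(
    session_id="<session-id>",  # placeholder; obtained from session creation
    data_flow=models.DataFlowDebugResource(
        name="df-debug",
        properties=models.MappingDataFlow(description="debug run"),
    ),
)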
:type next_link: str """ @@ -10546,46 +10998,6 @@ def __init__( self.dataset_parameters = kwargs.get('dataset_parameters', None) -class SubResource(msrest.serialization.Model): - """Azure Data Factory nested resource, which belongs to a factory. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. - :vartype type: str - :ivar etag: Etag identifies change in the resource. - :vartype etag: str - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(SubResource, self).__init__(**kwargs) - self.id = None - self.name = None - self.type = None - self.etag = None - - class DataFlowResource(SubResource): """Data flow resource type. @@ -10602,7 +11014,7 @@ class DataFlowResource(SubResource): :ivar etag: Etag identifies change in the resource. :vartype etag: str :param properties: Required. Data flow properties. - :type properties: ~data_factory_management_client.models.DataFlow + :type properties: ~azure.mgmt.datafactory.models.DataFlow """ _validation = { @@ -10668,11 +11080,11 @@ class DataFlowSink(Transformation): :param description: Transformation description. :type description: str :param dataset: Dataset reference. - :type dataset: ~data_factory_management_client.models.DatasetReference + :type dataset: ~azure.mgmt.datafactory.models.DatasetReference :param linked_service: Linked service reference. - :type linked_service: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference :param schema_linked_service: Schema linked service reference. - :type schema_linked_service: ~data_factory_management_client.models.LinkedServiceReference + :type schema_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference """ _validation = { @@ -10707,11 +11119,11 @@ class DataFlowSource(Transformation): :param description: Transformation description. :type description: str :param dataset: Dataset reference. - :type dataset: ~data_factory_management_client.models.DatasetReference + :type dataset: ~azure.mgmt.datafactory.models.DatasetReference :param linked_service: Linked service reference. - :type linked_service: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference :param schema_linked_service: Schema linked service reference. - :type schema_linked_service: ~data_factory_management_client.models.LinkedServiceReference + :type schema_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference """ _validation = { @@ -10768,7 +11180,7 @@ class DataFlowStagingInfo(msrest.serialization.Model): """Staging info for execute data flow activity. :param linked_service: Staging linked service reference. - :type linked_service: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference :param folder_path: Folder path for staging blob. Type: string (or Expression with resultType string). 
:type folder_path: object @@ -10803,18 +11215,18 @@ class DataLakeAnalyticsUsqlActivity(ExecutionActivity): :param description: Activity description. :type description: str :param depends_on: Activity depends on condition. - :type depends_on: list[~data_factory_management_client.models.ActivityDependency] + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. - :type user_properties: list[~data_factory_management_client.models.UserProperty] + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] :param linked_service_name: Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param policy: Activity policy. - :type policy: ~data_factory_management_client.models.ActivityPolicy + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy :param script_path: Required. Case-sensitive path to folder that contains the U-SQL script. Type: string (or Expression with resultType string). :type script_path: object :param script_linked_service: Required. Script linked service reference. - :type script_linked_service: ~data_factory_management_client.models.LinkedServiceReference + :type script_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference :param degree_of_parallelism: The maximum number of nodes simultaneously used to run the job. Default value is 1. Type: integer (or Expression with resultType integer), minimum: 1. :type degree_of_parallelism: object @@ -10883,8 +11295,9 @@ class DatasetCompression(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset compression.Constant filled by server. - :type type: str + :param type: Required. Type of dataset compression. Type: string (or Expression with resultType + string).Constant filled by server. + :type type: object """ _validation = { @@ -10893,7 +11306,7 @@ class DatasetCompression(msrest.serialization.Model): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'object'}, } _subtype_map = { @@ -10917,8 +11330,9 @@ class DatasetBZip2Compression(DatasetCompression): :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset compression.Constant filled by server. - :type type: str + :param type: Required. Type of dataset compression. Type: string (or Expression with resultType + string).Constant filled by server. + :type type: object """ _validation = { @@ -10927,7 +11341,7 @@ class DatasetBZip2Compression(DatasetCompression): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'object'}, } def __init__( @@ -10969,7 +11383,7 @@ class DatasetDebugResource(SubResourceDebugResource): :param name: The resource name. :type name: str :param properties: Required. Dataset properties. 
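Staging for data-flow debug runs is described by the `DataFlowStagingInfo` model above, which pairs a linked-service reference with a blob folder path. A minimal sketch, where the linked-service name is hypothetical and `LinkedServiceReference` is assumed to take `reference_name` as its key field:

from azure.mgmt.datafactory import models

staging = models.DataFlowStagingInfo(
    linked_service=models.LinkedServiceReference(reference_name="ls_staging_blob"),
    folder_path="staging/debug",  # string or Expression, per the docstring
)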
- :type properties: ~data_factory_management_client.models.Dataset + :type properties: ~azure.mgmt.datafactory.models.Dataset """ _validation = { @@ -10997,10 +11411,11 @@ class DatasetDeflateCompression(DatasetCompression): :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset compression.Constant filled by server. - :type type: str - :param level: The Deflate compression level. Possible values include: "Optimal", "Fastest". - :type level: str or ~data_factory_management_client.models.DatasetCompressionLevel + :param type: Required. Type of dataset compression. Type: string (or Expression with resultType + string).Constant filled by server. + :type type: object + :param level: The Deflate compression level. + :type level: object """ _validation = { @@ -11009,8 +11424,8 @@ class DatasetDeflateCompression(DatasetCompression): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'level': {'key': 'level', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'object'}, + 'level': {'key': 'level', 'type': 'object'}, } def __init__( @@ -11049,10 +11464,11 @@ class DatasetGZipCompression(DatasetCompression): :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset compression.Constant filled by server. - :type type: str - :param level: The GZip compression level. Possible values include: "Optimal", "Fastest". - :type level: str or ~data_factory_management_client.models.DatasetCompressionLevel + :param type: Required. Type of dataset compression. Type: string (or Expression with resultType + string).Constant filled by server. + :type type: object + :param level: The GZip compression level. + :type level: object """ _validation = { @@ -11061,8 +11477,8 @@ class DatasetGZipCompression(DatasetCompression): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'level': {'key': 'level', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'object'}, + 'level': {'key': 'level', 'type': 'object'}, } def __init__( @@ -11080,7 +11496,7 @@ class DatasetListResponse(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. :param value: Required. List of datasets. - :type value: list[~data_factory_management_client.models.DatasetResource] + :type value: list[~azure.mgmt.datafactory.models.DatasetResource] :param next_link: The link to the next page of results, if any remaining results exist. :type next_link: str """ @@ -11156,7 +11572,7 @@ class DatasetResource(SubResource): :ivar etag: Etag identifies change in the resource. :vartype etag: str :param properties: Required. Dataset properties. - :type properties: ~data_factory_management_client.models.Dataset + :type properties: ~azure.mgmt.datafactory.models.Dataset """ _validation = { @@ -11219,8 +11635,9 @@ class DatasetTarCompression(DatasetCompression): :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset compression.Constant filled by server. - :type type: str + :param type: Required. Type of dataset compression. 
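Note the pattern in the compression hunks above: both the `type` discriminator and the `level` property move from `str` (with an enum cross-reference) to `object`, which is how these generated models admit ADF expressions alongside literals. A sketch of both spellings, assuming the standard ADF expression JSON shape:

from azure.mgmt.datafactory import models

# Literal level, exactly as before the change.
gz = models.DatasetGZipCompression(level="Optimal")

# Expression-valued level, legal now that the field is typed 'object'.
gz_dynamic = models.DatasetGZipCompression(
    level={"value": "@pipeline().parameters.compressionLevel",
           "type": "Expression"},
)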
Type: string (or Expression with resultType + string).Constant filled by server. + :type type: object """ _validation = { @@ -11229,7 +11646,7 @@ class DatasetTarCompression(DatasetCompression): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'object'}, } def __init__( @@ -11248,10 +11665,11 @@ class DatasetTarGZipCompression(DatasetCompression): :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset compression.Constant filled by server. - :type type: str - :param level: The TarGZip compression level. Possible values include: "Optimal", "Fastest". - :type level: str or ~data_factory_management_client.models.DatasetCompressionLevel + :param type: Required. Type of dataset compression. Type: string (or Expression with resultType + string).Constant filled by server. + :type type: object + :param level: The TarGZip compression level. + :type level: object """ _validation = { @@ -11260,8 +11678,8 @@ class DatasetTarGZipCompression(DatasetCompression): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'level': {'key': 'level', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'object'}, + 'level': {'key': 'level', 'type': 'object'}, } def __init__( @@ -11281,10 +11699,11 @@ class DatasetZipDeflateCompression(DatasetCompression): :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset compression.Constant filled by server. - :type type: str - :param level: The ZipDeflate compression level. Possible values include: "Optimal", "Fastest". - :type level: str or ~data_factory_management_client.models.DatasetCompressionLevel + :param type: Required. Type of dataset compression. Type: string (or Expression with resultType + string).Constant filled by server. + :type type: object + :param level: The ZipDeflate compression level. + :type level: object """ _validation = { @@ -11293,8 +11712,8 @@ class DatasetZipDeflateCompression(DatasetCompression): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'level': {'key': 'level', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'object'}, + 'level': {'key': 'level', 'type': 'object'}, } def __init__( @@ -11317,11 +11736,11 @@ class Db2LinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param connection_string: The connection string. 
It is mutually exclusive with server, @@ -11336,12 +11755,12 @@ class Db2LinkedService(LinkedService): :type database: object :param authentication_type: AuthenticationType to be used for connection. It is mutually exclusive with connectionString property. Possible values include: "Basic". - :type authentication_type: str or ~data_factory_management_client.models.Db2AuthenticationType + :type authentication_type: str or ~azure.mgmt.datafactory.models.Db2AuthenticationType :param username: Username for authentication. It is mutually exclusive with connectionString property. Type: string (or Expression with resultType string). :type username: object :param password: Password for authentication. - :type password: ~data_factory_management_client.models.SecretBase + :type password: ~azure.mgmt.datafactory.models.SecretBase :param package_collection: Under where packages are created when querying database. It is mutually exclusive with connectionString property. Type: string (or Expression with resultType string). @@ -11413,12 +11832,15 @@ class Db2Source(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: Database query. Type: string (or Expression with resultType string). :type query: object """ @@ -11433,8 +11855,9 @@ class Db2Source(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -11466,14 +11889,14 @@ class Db2TableDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. 
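`Db2Source` picks up the two changes that recur throughout this diff: the new `disable_metrics_collection` flag and `additional_columns` retyped from a modeled list to a bare `object`. A sketch showing both, with the query text and column name purely illustrative:

from azure.mgmt.datafactory import models

src = models.Db2Source(
    query="SELECT * FROM SAMPLE.EMPLOYEE",
    disable_metrics_collection=True,
    # Now typed 'object': a plain list of name/value pairs in the
    # AdditionalColumns shape still works, and an expression does too.
    additional_columns=[{"name": "batch_id", "value": "@pipeline().RunId"}],
)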
If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: This property will be retired. Please consider using schema + table properties instead. :type table_name: object @@ -11530,13 +11953,13 @@ class DeleteActivity(ExecutionActivity): :param description: Activity description. :type description: str :param depends_on: Activity depends on condition. - :type depends_on: list[~data_factory_management_client.models.ActivityDependency] + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. - :type user_properties: list[~data_factory_management_client.models.UserProperty] + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] :param linked_service_name: Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param policy: Activity policy. - :type policy: ~data_factory_management_client.models.ActivityPolicy + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy :param recursive: If true, files or sub-folders under current folder path will be deleted recursively. Default is false. Type: boolean (or Expression with resultType boolean). :type recursive: object @@ -11548,11 +11971,11 @@ class DeleteActivity(ExecutionActivity): :type enable_logging: object :param log_storage_settings: Log storage settings customer need to provide when enableLogging is true. - :type log_storage_settings: ~data_factory_management_client.models.LogStorageSettings + :type log_storage_settings: ~azure.mgmt.datafactory.models.LogStorageSettings :param dataset: Required. Delete activity dataset reference. - :type dataset: ~data_factory_management_client.models.DatasetReference + :type dataset: ~azure.mgmt.datafactory.models.DatasetReference :param store_settings: Delete activity store settings. - :type store_settings: ~data_factory_management_client.models.StoreReadSettings + :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings """ _validation = { @@ -11631,16 +12054,16 @@ class DelimitedTextDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param location: The location of the delimited text storage. - :type location: ~data_factory_management_client.models.DatasetLocation + :type location: ~azure.mgmt.datafactory.models.DatasetLocation :param column_delimiter: The column delimiter. Type: string (or Expression with resultType string). 
:type column_delimiter: object @@ -11652,12 +12075,11 @@ class DelimitedTextDataset(Dataset): https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with resultType string). :type encoding_name: object - :param compression_codec: Possible values include: "none", "gzip", "snappy", "lzo", "bzip2", - "deflate", "zipDeflate", "lz4", "tar", "tarGZip". - :type compression_codec: str or ~data_factory_management_client.models.CompressionCodec - :param compression_level: The data compression method used for DelimitedText. Possible values - include: "Optimal", "Fastest". - :type compression_level: str or ~data_factory_management_client.models.DatasetCompressionLevel + :param compression_codec: The data compressionCodec. Type: string (or Expression with + resultType string). + :type compression_codec: object + :param compression_level: The data compression method used for DelimitedText. + :type compression_level: object :param quote_char: The quote character. Type: string (or Expression with resultType string). :type quote_char: object :param escape_char: The escape character. Type: string (or Expression with resultType string). @@ -11689,8 +12111,8 @@ class DelimitedTextDataset(Dataset): 'column_delimiter': {'key': 'typeProperties.columnDelimiter', 'type': 'object'}, 'row_delimiter': {'key': 'typeProperties.rowDelimiter', 'type': 'object'}, 'encoding_name': {'key': 'typeProperties.encodingName', 'type': 'object'}, - 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'str'}, - 'compression_level': {'key': 'typeProperties.compressionLevel', 'type': 'str'}, + 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'object'}, + 'compression_level': {'key': 'typeProperties.compressionLevel', 'type': 'object'}, 'quote_char': {'key': 'typeProperties.quoteChar', 'type': 'object'}, 'escape_char': {'key': 'typeProperties.escapeChar', 'type': 'object'}, 'first_row_as_header': {'key': 'typeProperties.firstRowAsHeader', 'type': 'object'}, @@ -11729,7 +12151,7 @@ class DelimitedTextReadSettings(FormatReadSettings): input files. Type: integer (or Expression with resultType integer). :type skip_line_count: object :param compression_properties: Compression settings. - :type compression_properties: ~data_factory_management_client.models.CompressionReadSettings + :type compression_properties: ~azure.mgmt.datafactory.models.CompressionReadSettings """ _validation = { @@ -11778,10 +12200,13 @@ class DelimitedTextSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param store_settings: DelimitedText store settings. - :type store_settings: ~data_factory_management_client.models.StoreWriteSettings + :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings :param format_settings: DelimitedText format settings. 
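With `compressionCodec` and `compressionLevel` retyped to `object`, a `DelimitedTextDataset` can still be built with plain strings; expression values simply become legal as well. A sketch, where the linked-service name and the `AzureBlobStorageLocation` parameters are assumptions:

from azure.mgmt.datafactory import models

ds = models.DelimitedTextDataset(
    linked_service_name=models.LinkedServiceReference(reference_name="ls_blob"),
    location=models.AzureBlobStorageLocation(container="data",
                                             folder_path="in/csv"),
    column_delimiter=",",
    first_row_as_header=True,
    compression_codec="gzip",  # literal; an Expression object is also accepted
)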
- :type format_settings: ~data_factory_management_client.models.DelimitedTextWriteSettings + :type format_settings: ~azure.mgmt.datafactory.models.DelimitedTextWriteSettings """ _validation = { @@ -11796,6 +12221,7 @@ class DelimitedTextSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, 'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextWriteSettings'}, } @@ -11829,13 +12255,16 @@ class DelimitedTextSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param store_settings: DelimitedText store settings. - :type store_settings: ~data_factory_management_client.models.StoreReadSettings + :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings :param format_settings: DelimitedText format settings. - :type format_settings: ~data_factory_management_client.models.DelimitedTextReadSettings + :type format_settings: ~azure.mgmt.datafactory.models.DelimitedTextReadSettings :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object """ _validation = { @@ -11848,9 +12277,10 @@ class DelimitedTextSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, 'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextReadSettings'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -12004,14 +12434,14 @@ class DocumentDbCollectionDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. 
- :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param collection_name: Required. Document Database collection name. Type: string (or Expression with resultType string). :type collection_name: object @@ -12070,6 +12500,9 @@ class DocumentDbCollectionSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param nesting_separator: Nested properties separator. Default is . (dot). Type: string (or Expression with resultType string). :type nesting_separator: object @@ -12090,6 +12523,7 @@ class DocumentDbCollectionSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, } @@ -12123,6 +12557,9 @@ class DocumentDbCollectionSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query: Documents query. Type: string (or Expression with resultType string). :type query: object :param nesting_separator: Nested properties separator. Type: string (or Expression with @@ -12132,8 +12569,8 @@ class DocumentDbCollectionSource(CopySource): pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object """ _validation = { @@ -12146,10 +12583,11 @@ class DocumentDbCollectionSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -12175,18 +12613,18 @@ class DrillLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. 
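The Cosmos DB (SQL API) sink gains the same `disable_metrics_collection` flag, and its `write_behavior` is a bare `object`, so the usual string values pass straight through. A sketch; the "upsert" value follows the service documentation and is not spelled out in this hunk:

from azure.mgmt.datafactory import models

sink = models.DocumentDbCollectionSink(
    write_behavior="upsert",   # assumed allowed value; insert is the default
    nesting_separator=".",     # the documented default, shown for clarity
    disable_metrics_collection=False,
)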
:type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param connection_string: An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. :type connection_string: object :param pwd: The Azure key vault secret reference of password in connection string. - :type pwd: ~data_factory_management_client.models.AzureKeyVaultSecretReference + :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -12239,12 +12677,15 @@ class DrillSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -12260,8 +12701,9 @@ class DrillSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -12293,14 +12735,14 @@ class DrillTableDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. 
- :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: This property will be retired. Please consider using schema + table properties instead. :type table_name: object @@ -12373,7 +12815,7 @@ class DwCopyCommandSettings(msrest.serialization.Model): default values in the property overwrite the DEFAULT constraint set in the DB, and identity column cannot have a default value. Type: array of objects (or Expression with resultType array of objects). - :type default_values: list[~data_factory_management_client.models.DwCopyCommandDefaultValue] + :type default_values: list[~azure.mgmt.datafactory.models.DwCopyCommandDefaultValue] :param additional_options: Additional options directly passed to SQL DW in Copy Command. Type: key value pairs (value should be string type) (or Expression with resultType object). Example: "additionalOptions": { "MAXERRORS": "1000", "DATEFORMAT": "'ymd'" }. @@ -12405,11 +12847,11 @@ class DynamicsAxLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param url: Required. The Dynamics AX (or Dynamics 365 Finance and Operations) instance OData @@ -12421,7 +12863,7 @@ class DynamicsAxLinkedService(LinkedService): :param service_principal_key: Required. Specify the application's key. Mark this field as a SecureString to store it securely in Data Factory, or reference a secret stored in Azure Key Vault. Type: string (or Expression with resultType string). - :type service_principal_key: ~data_factory_management_client.models.SecretBase + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :param tenant: Required. Specify the tenant information (domain name or tenant ID) under which your application resides. Retrieve it by hovering the mouse in the top-right corner of the Azure portal. Type: string (or Expression with resultType string). @@ -12492,14 +12934,14 @@ class DynamicsAxResourceDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. 
- :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param path: Required. The path of the Dynamics AX OData entity. Type: string (or Expression with resultType string). :type path: object @@ -12552,12 +12994,15 @@ class DynamicsAxSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -12578,8 +13023,9 @@ class DynamicsAxSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, } @@ -12613,14 +13059,14 @@ class DynamicsCrmEntityDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param entity_name: The logical name of the entity. 
Type: string (or Expression with resultType string). :type entity_name: object @@ -12664,18 +13110,17 @@ class DynamicsCrmLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param deployment_type: Required. The deployment type of the Dynamics CRM instance. 'Online' for Dynamics CRM Online and 'OnPremisesWithIfd' for Dynamics CRM on-premises with Ifd. Type: - string (or Expression with resultType string). Possible values include: "Online", - "OnPremisesWithIfd". - :type deployment_type: str or ~data_factory_management_client.models.DynamicsDeploymentType + string (or Expression with resultType string). + :type deployment_type: object :param host_name: The host name of the on-premises Dynamics CRM server. The property is required for on-prem and not allowed for online. Type: string (or Expression with resultType string). @@ -12694,30 +13139,26 @@ class DynamicsCrmLinkedService(LinkedService): :param authentication_type: Required. The authentication type to connect to Dynamics CRM server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario, 'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: string (or - Expression with resultType string). Possible values include: "Office365", "Ifd", - "AADServicePrincipal". - :type authentication_type: str or - ~data_factory_management_client.models.DynamicsAuthenticationType + Expression with resultType string). + :type authentication_type: object :param username: User name to access the Dynamics CRM instance. Type: string (or Expression with resultType string). :type username: object :param password: Password to access the Dynamics CRM instance. - :type password: ~data_factory_management_client.models.SecretBase + :type password: ~azure.mgmt.datafactory.models.SecretBase :param service_principal_id: The client ID of the application in Azure Active Directory used for Server-To-Server authentication. Type: string (or Expression with resultType string). :type service_principal_id: object :param service_principal_credential_type: The service principal credential type to use in Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' - for certificate. Type: string (or Expression with resultType string). Possible values include: - "ServicePrincipalKey", "ServicePrincipalCert". - :type service_principal_credential_type: str or - ~data_factory_management_client.models.DynamicsServicePrincipalCredentialType + for certificate. Type: string (or Expression with resultType string). + :type service_principal_credential_type: object :param service_principal_credential: The credential of the service principal object in Azure Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. 
If servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only be AzureKeyVaultSecretReference. - :type service_principal_credential: ~data_factory_management_client.models.SecretBase + :type service_principal_credential: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -12737,16 +13178,16 @@ class DynamicsCrmLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'}, + 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'object'}, 'host_name': {'key': 'typeProperties.hostName', 'type': 'object'}, 'port': {'key': 'typeProperties.port', 'type': 'object'}, 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'}, 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, 'username': {'key': 'typeProperties.username', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'str'}, + 'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'object'}, 'service_principal_credential': {'key': 'typeProperties.servicePrincipalCredential', 'type': 'SecretBase'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -12796,9 +13237,12 @@ class DynamicsCrmSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param write_behavior: Required. The write behavior for the operation. Possible values include: "Upsert". - :type write_behavior: str or ~data_factory_management_client.models.DynamicsSinkWriteBehavior + :type write_behavior: str or ~azure.mgmt.datafactory.models.DynamicsSinkWriteBehavior :param ignore_null_values: The flag indicating whether to ignore null values from input dataset (except key fields) during write operation. Default is false. Type: boolean (or Expression with resultType boolean). 
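The disableMetricsCollection flag added throughout this change is wired identically on every source, sink, and store-settings model. A minimal usage sketch for the sink above, assuming the kwargs-based constructors used in this file (values are illustrative):

    from azure.mgmt.datafactory.models import DynamicsCrmSink

    sink = DynamicsCrmSink(
        write_behavior="Upsert",          # required; "Upsert" is the only documented value
        ignore_null_values=True,
        disable_metrics_collection=True,  # new in this change; service default is false
    )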
@@ -12821,6 +13265,7 @@ class DynamicsCrmSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, 'alternate_key_name': {'key': 'alternateKeyName', 'type': 'object'}, @@ -12856,12 +13301,15 @@ class DynamicsCrmSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query: FetchXML is a proprietary query language that is used in Microsoft Dynamics CRM (online & on-premises). Type: string (or Expression with resultType string). :type query: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object """ _validation = { @@ -12874,8 +13322,9 @@ class DynamicsCrmSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -12907,14 +13356,14 @@ class DynamicsEntityDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param entity_name: The logical name of the entity. Type: string (or Expression with resultType string). :type entity_name: object @@ -12958,17 +13407,17 @@ class DynamicsLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. 
- :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param deployment_type: Required. The deployment type of the Dynamics instance. 'Online' for Dynamics Online and 'OnPremisesWithIfd' for Dynamics on-premises with Ifd. Type: string (or - Expression with resultType string). Possible values include: "Online", "OnPremisesWithIfd". - :type deployment_type: str or ~data_factory_management_client.models.DynamicsDeploymentType + Expression with resultType string). + :type deployment_type: object :param host_name: The host name of the on-premises Dynamics server. The property is required for on-prem and not allowed for online. Type: string (or Expression with resultType string). :type host_name: object @@ -12986,29 +13435,26 @@ class DynamicsLinkedService(LinkedService): :param authentication_type: Required. The authentication type to connect to Dynamics server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario, 'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: string (or Expression with - resultType string). Possible values include: "Office365", "Ifd", "AADServicePrincipal". - :type authentication_type: str or - ~data_factory_management_client.models.DynamicsAuthenticationType + resultType string). + :type authentication_type: object :param username: User name to access the Dynamics instance. Type: string (or Expression with resultType string). :type username: object :param password: Password to access the Dynamics instance. - :type password: ~data_factory_management_client.models.SecretBase + :type password: ~azure.mgmt.datafactory.models.SecretBase :param service_principal_id: The client ID of the application in Azure Active Directory used for Server-To-Server authentication. Type: string (or Expression with resultType string). :type service_principal_id: object :param service_principal_credential_type: The service principal credential type to use in Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' - for certificate. Type: string (or Expression with resultType string). Possible values include: - "ServicePrincipalKey", "ServicePrincipalCert". - :type service_principal_credential_type: str or - ~data_factory_management_client.models.DynamicsServicePrincipalCredentialType + for certificate. Type: string (or Expression with resultType string). + :type service_principal_credential_type: str :param service_principal_credential: The credential of the service principal object in Azure Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only be AzureKeyVaultSecretReference. 
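Because deploymentType and authenticationType are loosened here from enum strings to object, they can now carry an ADF expression as well as a literal. A hedged sketch of both spellings (account names and secrets are placeholders):

    from azure.mgmt.datafactory.models import DynamicsLinkedService, SecureString

    ls = DynamicsLinkedService(
        deployment_type="Online",         # literal string, as before
        authentication_type="Office365",
        username="alice@contoso.com",
        password=SecureString(value="<secret>"),  # placeholder
    )
    # ...or, now that the property is plain object, an expression:
    ls.deployment_type = {"type": "Expression", "value": "@linkedService().depType"}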
- :type service_principal_credential: ~data_factory_management_client.models.SecretBase + :type service_principal_credential: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -13028,12 +13474,12 @@ class DynamicsLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'}, + 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'object'}, 'host_name': {'key': 'typeProperties.hostName', 'type': 'object'}, 'port': {'key': 'typeProperties.port', 'type': 'object'}, 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'}, 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, 'username': {'key': 'typeProperties.username', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, @@ -13087,9 +13533,12 @@ class DynamicsSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param write_behavior: Required. The write behavior for the operation. Possible values include: "Upsert". - :type write_behavior: str or ~data_factory_management_client.models.DynamicsSinkWriteBehavior + :type write_behavior: str or ~azure.mgmt.datafactory.models.DynamicsSinkWriteBehavior :param ignore_null_values: The flag indicating whether ignore null values from input dataset (except key fields) during write operation. Default is false. Type: boolean (or Expression with resultType boolean). @@ -13112,6 +13561,7 @@ class DynamicsSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, 'alternate_key_name': {'key': 'alternateKeyName', 'type': 'object'}, @@ -13147,12 +13597,15 @@ class DynamicsSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query: FetchXML is a proprietary query language that is used in Microsoft Dynamics (online & on-premises). 
Type: string (or Expression with resultType string). :type query: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object """ _validation = { @@ -13165,8 +13618,9 @@ class DynamicsSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -13190,11 +13644,11 @@ class EloquaLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param endpoint: Required. The endpoint of the Eloqua server. (i.e. eloqua.example.com). @@ -13203,7 +13657,7 @@ class EloquaLinkedService(LinkedService): sitename/username. (i.e. Eloqua/Alice). :type username: object :param password: The password corresponding to the user name. - :type password: ~data_factory_management_client.models.SecretBase + :type password: ~azure.mgmt.datafactory.models.SecretBase :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. :type use_encrypted_endpoints: object @@ -13276,14 +13730,14 @@ class EloquaObjectDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The table name. Type: string (or Expression with resultType string). 
:type table_name: object """ @@ -13334,12 +13788,15 @@ class EloquaSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -13355,8 +13812,9 @@ class EloquaSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -13384,7 +13842,7 @@ class EncryptionConfiguration(msrest.serialization.Model): :type key_version: str :param identity: User assigned identity to use to authenticate to customer's key vault. If not provided Managed Service Identity will be used. - :type identity: ~data_factory_management_client.models.CmkIdentityDefinition + :type identity: ~azure.mgmt.datafactory.models.CmkIdentityDefinition """ _validation = { @@ -13415,7 +13873,7 @@ class EntityReference(msrest.serialization.Model): :param type: The type of this referenced entity. Possible values include: "IntegrationRuntimeReference", "LinkedServiceReference". - :type type: str or ~data_factory_management_client.models.IntegrationRuntimeEntityReferenceType + :type type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeEntityReferenceType :param reference_name: The name of this referenced entity. :type reference_name: str """ @@ -13488,19 +13946,22 @@ class ExcelDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. 
- :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param location: The location of the excel storage. - :type location: ~data_factory_management_client.models.DatasetLocation - :param sheet_name: The sheet of excel file. Type: string (or Expression with resultType + :type location: ~azure.mgmt.datafactory.models.DatasetLocation + :param sheet_name: The sheet name of excel file. Type: string (or Expression with resultType string). :type sheet_name: object + :param sheet_index: The sheet index of excel file and default value is 0. Type: integer (or + Expression with resultType integer). + :type sheet_index: object :param range: The partial data of one sheet. Type: string (or Expression with resultType string). :type range: object @@ -13509,7 +13970,7 @@ class ExcelDataset(Dataset): false. Type: boolean (or Expression with resultType boolean). :type first_row_as_header: object :param compression: The data compression method used for the json dataset. - :type compression: ~data_factory_management_client.models.DatasetCompression + :type compression: ~azure.mgmt.datafactory.models.DatasetCompression :param null_value: The null value string. Type: string (or Expression with resultType string). :type null_value: object """ @@ -13531,6 +13992,7 @@ class ExcelDataset(Dataset): 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, 'sheet_name': {'key': 'typeProperties.sheetName', 'type': 'object'}, + 'sheet_index': {'key': 'typeProperties.sheetIndex', 'type': 'object'}, 'range': {'key': 'typeProperties.range', 'type': 'object'}, 'first_row_as_header': {'key': 'typeProperties.firstRowAsHeader', 'type': 'object'}, 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, @@ -13545,6 +14007,7 @@ def __init__( self.type = 'Excel' # type: str self.location = kwargs.get('location', None) self.sheet_name = kwargs.get('sheet_name', None) + self.sheet_index = kwargs.get('sheet_index', None) self.range = kwargs.get('range', None) self.first_row_as_header = kwargs.get('first_row_as_header', None) self.compression = kwargs.get('compression', None) @@ -13570,11 +14033,14 @@ class ExcelSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param store_settings: Excel store settings. - :type store_settings: ~data_factory_management_client.models.StoreReadSettings + :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
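sheetIndex complements sheetName for workbooks addressed by position rather than by name. A sketch of a dataset reading the first sheet, assuming the kwargs shown in ExcelDataset above (reference and path names are placeholders):

    from azure.mgmt.datafactory.models import (
        AzureBlobStorageLocation, ExcelDataset, LinkedServiceReference)

    ds = ExcelDataset(
        linked_service_name=LinkedServiceReference(
            type="LinkedServiceReference", reference_name="exampleBlobLS"),
        location=AzureBlobStorageLocation(container="data", file_name="book.xlsx"),
        sheet_index=0,            # new: zero-based position; default is 0
        first_row_as_header=True,
    )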
+ :type additional_columns: object """ _validation = { @@ -13587,8 +14053,9 @@ class ExcelSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -13616,22 +14083,21 @@ class ExecuteDataFlowActivity(ExecutionActivity): :param description: Activity description. :type description: str :param depends_on: Activity depends on condition. - :type depends_on: list[~data_factory_management_client.models.ActivityDependency] + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. - :type user_properties: list[~data_factory_management_client.models.UserProperty] + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] :param linked_service_name: Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param policy: Activity policy. - :type policy: ~data_factory_management_client.models.ActivityPolicy + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy :param data_flow: Required. Data flow reference. - :type data_flow: ~data_factory_management_client.models.DataFlowReference + :type data_flow: ~azure.mgmt.datafactory.models.DataFlowReference :param staging: Staging info for execute data flow activity. - :type staging: ~data_factory_management_client.models.DataFlowStagingInfo + :type staging: ~azure.mgmt.datafactory.models.DataFlowStagingInfo :param integration_runtime: The integration runtime reference. - :type integration_runtime: ~data_factory_management_client.models.IntegrationRuntimeReference + :type integration_runtime: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param compute: Compute properties for data flow activity. - :type compute: - ~data_factory_management_client.models.ExecuteDataFlowActivityTypePropertiesCompute + :type compute: ~azure.mgmt.datafactory.models.ExecuteDataFlowActivityTypePropertiesCompute :param trace_level: Trace level setting used for data flow monitoring output. Supported values are: 'coarse', 'fine', and 'none'. Type: string (or Expression with resultType string). :type trace_level: object @@ -13724,11 +14190,11 @@ class ExecutePipelineActivity(Activity): :param description: Activity description. :type description: str :param depends_on: Activity depends on condition. - :type depends_on: list[~data_factory_management_client.models.ActivityDependency] + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. - :type user_properties: list[~data_factory_management_client.models.UserProperty] + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] :param pipeline: Required. Pipeline reference. - :type pipeline: ~data_factory_management_client.models.PipelineReference + :type pipeline: ~azure.mgmt.datafactory.models.PipelineReference :param parameters: Pipeline parameters. 
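For orientation, a minimal ExecutePipelineActivity assembled from the params documented here (a sketch; pipeline and parameter names are placeholders):

    from azure.mgmt.datafactory.models import (
        ExecutePipelineActivity, PipelineReference)

    act = ExecutePipelineActivity(
        name="RunChild",
        pipeline=PipelineReference(
            type="PipelineReference", reference_name="childPipeline"),
        parameters={"runDate": "2021-06-01"},  # illustrative pipeline parameter
        wait_on_completion=True,
    )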
:type parameters: dict[str, object] :param wait_on_completion: Defines whether activity execution will wait for the dependent @@ -13780,15 +14246,15 @@ class ExecuteSsisPackageActivity(ExecutionActivity): :param description: Activity description. :type description: str :param depends_on: Activity depends on condition. - :type depends_on: list[~data_factory_management_client.models.ActivityDependency] + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. - :type user_properties: list[~data_factory_management_client.models.UserProperty] + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] :param linked_service_name: Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param policy: Activity policy. - :type policy: ~data_factory_management_client.models.ActivityPolicy + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy :param package_location: Required. SSIS package location. - :type package_location: ~data_factory_management_client.models.SsisPackageLocation + :type package_location: ~azure.mgmt.datafactory.models.SsisPackageLocation :param runtime: Specifies the runtime to execute SSIS package. The value should be "x86" or "x64". Type: string (or Expression with resultType string). :type runtime: object @@ -13799,15 +14265,13 @@ class ExecuteSsisPackageActivity(ExecutionActivity): Expression with resultType string). :type environment_path: object :param execution_credential: The package execution credential. - :type execution_credential: ~data_factory_management_client.models.SsisExecutionCredential + :type execution_credential: ~azure.mgmt.datafactory.models.SsisExecutionCredential :param connect_via: Required. The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param project_parameters: The project level parameters to execute the SSIS package. - :type project_parameters: dict[str, - ~data_factory_management_client.models.SsisExecutionParameter] + :type project_parameters: dict[str, ~azure.mgmt.datafactory.models.SsisExecutionParameter] :param package_parameters: The package level parameters to execute the SSIS package. - :type package_parameters: dict[str, - ~data_factory_management_client.models.SsisExecutionParameter] + :type package_parameters: dict[str, ~azure.mgmt.datafactory.models.SsisExecutionParameter] :param project_connection_managers: The project level connection managers to execute the SSIS package. :type project_connection_managers: dict[str, object] @@ -13815,10 +14279,9 @@ class ExecuteSsisPackageActivity(ExecutionActivity): package. :type package_connection_managers: dict[str, object] :param property_overrides: The property overrides to execute the SSIS package. - :type property_overrides: dict[str, - ~data_factory_management_client.models.SsisPropertyOverride] + :type property_overrides: dict[str, ~azure.mgmt.datafactory.models.SsisPropertyOverride] :param log_location: SSIS package execution log location. 
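Likewise, a hedged sketch of the SSIS activity above with only its required pieces (package path and integration runtime name are placeholders):

    from azure.mgmt.datafactory.models import (
        ExecuteSsisPackageActivity, IntegrationRuntimeReference,
        SsisPackageLocation)

    ssis = ExecuteSsisPackageActivity(
        name="RunPackage",
        package_location=SsisPackageLocation(
            package_path="Folder/Project/Package.dtsx"),  # illustrative path
        connect_via=IntegrationRuntimeReference(
            type="IntegrationRuntimeReference", reference_name="AzureSsisIR"),
        runtime="x64",
    )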
- :type log_location: ~data_factory_management_client.models.SsisLogLocation + :type log_location: ~azure.mgmt.datafactory.models.SsisLogLocation """ _validation = { @@ -13877,8 +14340,7 @@ class ExposureControlBatchRequest(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. :param exposure_control_requests: Required. List of exposure control features. - :type exposure_control_requests: - list[~data_factory_management_client.models.ExposureControlRequest] + :type exposure_control_requests: list[~azure.mgmt.datafactory.models.ExposureControlRequest] """ _validation = { @@ -13903,8 +14365,7 @@ class ExposureControlBatchResponse(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. :param exposure_control_responses: Required. List of exposure control feature values. - :type exposure_control_responses: - list[~data_factory_management_client.models.ExposureControlResponse] + :type exposure_control_responses: list[~azure.mgmt.datafactory.models.ExposureControlResponse] """ _validation = { @@ -14078,7 +14539,7 @@ class Factory(Resource): collection. :type additional_properties: dict[str, object] :param identity: Managed service identity of the factory. - :type identity: ~data_factory_management_client.models.FactoryIdentity + :type identity: ~azure.mgmt.datafactory.models.FactoryIdentity :ivar provisioning_state: Factory provisioning state, example Succeeded. :vartype provisioning_state: str :ivar create_time: Time the factory was created in ISO8601 format. @@ -14086,15 +14547,14 @@ class Factory(Resource): :ivar version: Version of the factory. :vartype version: str :param repo_configuration: Git repo information of the factory. - :type repo_configuration: ~data_factory_management_client.models.FactoryRepoConfiguration + :type repo_configuration: ~azure.mgmt.datafactory.models.FactoryRepoConfiguration :param global_parameters: List of parameters for factory. - :type global_parameters: dict[str, - ~data_factory_management_client.models.GlobalParameterSpecification] + :type global_parameters: dict[str, ~azure.mgmt.datafactory.models.GlobalParameterSpecification] :param encryption: Properties to enable Customer Managed Key for the factory. - :type encryption: ~data_factory_management_client.models.EncryptionConfiguration + :type encryption: ~azure.mgmt.datafactory.models.EncryptionConfiguration :param public_network_access: Whether or not public network access is allowed for the data factory. Possible values include: "Enabled", "Disabled". - :type public_network_access: str or ~data_factory_management_client.models.PublicNetworkAccess + :type public_network_access: str or ~azure.mgmt.datafactory.models.PublicNetworkAccess """ _validation = { @@ -14254,7 +14714,7 @@ class FactoryIdentity(msrest.serialization.Model): :param type: Required. The identity type. Possible values include: "SystemAssigned", "UserAssigned", "SystemAssigned,UserAssigned". - :type type: str or ~data_factory_management_client.models.FactoryIdentityType + :type type: str or ~azure.mgmt.datafactory.models.FactoryIdentityType :ivar principal_id: The principal id of the identity. :vartype principal_id: str :ivar tenant_id: The client tenant id of the identity. @@ -14293,7 +14753,7 @@ class FactoryListResponse(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. :param value: Required. List of factories. 
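The identity, encryption, and network knobs documented on Factory combine along these lines (a sketch using only params listed above; the region is a placeholder):

    from azure.mgmt.datafactory.models import Factory, FactoryIdentity

    factory = Factory(
        location="eastus",
        identity=FactoryIdentity(type="SystemAssigned"),
        public_network_access="Disabled",   # "Enabled" or "Disabled"
    )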
- :type value: list[~data_factory_management_client.models.Factory] + :type value: list[~azure.mgmt.datafactory.models.Factory] :param next_link: The link to the next page of results, if any remaining results exist. :type next_link: str """ @@ -14322,7 +14782,7 @@ class FactoryRepoUpdate(msrest.serialization.Model): :param factory_resource_id: The factory resource id. :type factory_resource_id: str :param repo_configuration: Git repo information of the factory. - :type repo_configuration: ~data_factory_management_client.models.FactoryRepoConfiguration + :type repo_configuration: ~azure.mgmt.datafactory.models.FactoryRepoConfiguration """ _attribute_map = { @@ -14345,7 +14805,7 @@ class FactoryUpdateParameters(msrest.serialization.Model): :param tags: A set of tags. The resource tags. :type tags: dict[str, str] :param identity: Managed service identity of the factory. - :type identity: ~data_factory_management_client.models.FactoryIdentity + :type identity: ~azure.mgmt.datafactory.models.FactoryIdentity """ _attribute_map = { @@ -14426,11 +14886,11 @@ class FileServerLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param host: Required. Host name of the server. Type: string (or Expression with resultType @@ -14440,7 +14900,7 @@ class FileServerLinkedService(LinkedService): string). :type user_id: object :param password: Password to logon the server. - :type password: ~data_factory_management_client.models.SecretBase + :type password: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -14527,6 +14987,9 @@ class FileServerReadSettings(StoreReadSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). 
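Store read settings pick up the same flag; a minimal sketch for the file-server flavor, assuming the serialized keys listed just below (the wildcard is illustrative):

    from azure.mgmt.datafactory.models import FileServerReadSettings

    read_settings = FileServerReadSettings(
        recursive=True,
        wildcard_file_name="*.csv",        # illustrative filter
        disable_metrics_collection=False,  # explicit default, for clarity
    )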
:type recursive: object @@ -14567,6 +15030,7 @@ class FileServerReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -14610,6 +15074,9 @@ class FileServerWriteSettings(StoreWriteSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param copy_behavior: The type of copy behavior for copy sink. :type copy_behavior: object """ @@ -14622,6 +15089,7 @@ class FileServerWriteSettings(StoreWriteSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, } @@ -14652,14 +15120,14 @@ class FileShareDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param folder_path: The path of the on-premises file system. Type: string (or Expression with resultType string). :type folder_path: object @@ -14673,12 +15141,12 @@ class FileShareDataset(Dataset): with resultType string). :type modified_datetime_end: object :param format: The format of the files. - :type format: ~data_factory_management_client.models.DatasetStorageFormat + :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat :param file_filter: Specify a filter to be used to select a subset of files in the folderPath rather than all files. Type: string (or Expression with resultType string). :type file_filter: object :param compression: The data compression method used for the file system. - :type compression: ~data_factory_management_client.models.DatasetCompression + :type compression: ~azure.mgmt.datafactory.models.DatasetCompression """ _validation = { @@ -14745,6 +15213,9 @@ class FileSystemSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. 
Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param copy_behavior: The type of copy behavior for copy sink. :type copy_behavior: object """ @@ -14761,6 +15232,7 @@ class FileSystemSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, } @@ -14792,12 +15264,15 @@ class FileSystemSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). :type recursive: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object """ _validation = { @@ -14810,8 +15285,9 @@ class FileSystemSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -14839,13 +15315,13 @@ class FilterActivity(Activity): :param description: Activity description. :type description: str :param depends_on: Activity depends on condition. - :type depends_on: list[~data_factory_management_client.models.ActivityDependency] + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. - :type user_properties: list[~data_factory_management_client.models.UserProperty] + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] :param items: Required. Input array on which filter should be applied. - :type items: ~data_factory_management_client.models.Expression + :type items: ~azure.mgmt.datafactory.models.Expression :param condition: Required. Condition to be used for filtering the input. - :type condition: ~data_factory_management_client.models.Expression + :type condition: ~azure.mgmt.datafactory.models.Expression """ _validation = { @@ -14891,18 +15367,18 @@ class ForEachActivity(Activity): :param description: Activity description. 
:type description: str :param depends_on: Activity depends on condition. - :type depends_on: list[~data_factory_management_client.models.ActivityDependency] + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. - :type user_properties: list[~data_factory_management_client.models.UserProperty] + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] :param is_sequential: Should the loop be executed in sequence or in parallel (max 50). :type is_sequential: bool :param batch_count: Batch count to be used for controlling the number of parallel execution (when isSequential is set to false). :type batch_count: int :param items: Required. Collection to iterate. - :type items: ~data_factory_management_client.models.Expression + :type items: ~azure.mgmt.datafactory.models.Expression :param activities: Required. List of activities to execute . - :type activities: list[~data_factory_management_client.models.Activity] + :type activities: list[~azure.mgmt.datafactory.models.Activity] """ _validation = { @@ -14951,6 +15427,9 @@ class FtpReadSettings(StoreReadSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). :type recursive: object @@ -14984,6 +15463,7 @@ class FtpReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -15021,11 +15501,11 @@ class FtpServerLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param host: Required. Host name of the FTP server. Type: string (or Expression with resultType @@ -15036,12 +15516,12 @@ class FtpServerLinkedService(LinkedService): :type port: object :param authentication_type: The authentication type to be used to connect to the FTP server. Possible values include: "Basic", "Anonymous". 
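A hedged sketch of the FTP linked service these params describe (host and credentials are placeholders):

    from azure.mgmt.datafactory.models import FtpServerLinkedService, SecureString

    ftp_ls = FtpServerLinkedService(
        host="ftp.example.com",           # placeholder host
        port=21,
        authentication_type="Basic",
        user_name="deploy",
        password=SecureString(value="<secret>"),  # placeholder
    )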
- :type authentication_type: str or ~data_factory_management_client.models.FtpAuthenticationType + :type authentication_type: str or ~azure.mgmt.datafactory.models.FtpAuthenticationType :param user_name: Username to logon the FTP server. Type: string (or Expression with resultType string). :type user_name: object :param password: Password to logon the FTP server. - :type password: ~data_factory_management_client.models.SecretBase + :type password: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -15169,21 +15649,21 @@ class GetMetadataActivity(ExecutionActivity): :param description: Activity description. :type description: str :param depends_on: Activity depends on condition. - :type depends_on: list[~data_factory_management_client.models.ActivityDependency] + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. - :type user_properties: list[~data_factory_management_client.models.UserProperty] + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] :param linked_service_name: Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param policy: Activity policy. - :type policy: ~data_factory_management_client.models.ActivityPolicy + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy :param dataset: Required. GetMetadata activity dataset reference. - :type dataset: ~data_factory_management_client.models.DatasetReference + :type dataset: ~azure.mgmt.datafactory.models.DatasetReference :param field_list: Fields of metadata to get from dataset. :type field_list: list[object] :param store_settings: GetMetadata activity store settings. - :type store_settings: ~data_factory_management_client.models.StoreReadSettings + :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings :param format_settings: GetMetadata activity format settings. - :type format_settings: ~data_factory_management_client.models.FormatReadSettings + :type format_settings: ~azure.mgmt.datafactory.models.FormatReadSettings """ _validation = { @@ -15247,6 +15727,8 @@ class GitHubAccessTokenRequest(msrest.serialization.Model): :type git_hub_access_code: str :param git_hub_client_id: GitHub application client ID. :type git_hub_client_id: str + :param git_hub_client_secret: GitHub bring your own app client secret information. + :type git_hub_client_secret: ~azure.mgmt.datafactory.models.GitHubClientSecret :param git_hub_access_token_base_url: Required. GitHub access token base URL. 
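The new gitHubClientSecret property rides along on the token request; a sketch assuming the GitHubClientSecret model introduced just below (vault URL, names, and codes are placeholders):

    from azure.mgmt.datafactory.models import (
        GitHubAccessTokenRequest, GitHubClientSecret)

    req = GitHubAccessTokenRequest(
        git_hub_access_code="<code-from-oauth-redirect>",   # placeholder
        git_hub_client_id="<byoa-app-client-id>",           # placeholder
        git_hub_client_secret=GitHubClientSecret(
            byoa_secret_akv_url="https://myvault.vault.azure.net/",
            byoa_secret_name="github-app-secret"),
        git_hub_access_token_base_url="https://api.github.com",
    )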
:type git_hub_access_token_base_url: str """ @@ -15259,6 +15741,7 @@ class GitHubAccessTokenRequest(msrest.serialization.Model): _attribute_map = { 'git_hub_access_code': {'key': 'gitHubAccessCode', 'type': 'str'}, 'git_hub_client_id': {'key': 'gitHubClientId', 'type': 'str'}, + 'git_hub_client_secret': {'key': 'gitHubClientSecret', 'type': 'GitHubClientSecret'}, 'git_hub_access_token_base_url': {'key': 'gitHubAccessTokenBaseUrl', 'type': 'str'}, } @@ -15269,6 +15752,7 @@ def __init__( super(GitHubAccessTokenRequest, self).__init__(**kwargs) self.git_hub_access_code = kwargs['git_hub_access_code'] self.git_hub_client_id = kwargs.get('git_hub_client_id', None) + self.git_hub_client_secret = kwargs.get('git_hub_client_secret', None) self.git_hub_access_token_base_url = kwargs['git_hub_access_token_base_url'] @@ -15291,6 +15775,29 @@ def __init__( self.git_hub_access_token = kwargs.get('git_hub_access_token', None) +class GitHubClientSecret(msrest.serialization.Model): + """Client secret information for factory's bring your own app repository configuration. + + :param byoa_secret_akv_url: Bring your own app client secret AKV URL. + :type byoa_secret_akv_url: str + :param byoa_secret_name: Bring your own app client secret name in AKV. + :type byoa_secret_name: str + """ + + _attribute_map = { + 'byoa_secret_akv_url': {'key': 'byoaSecretAkvUrl', 'type': 'str'}, + 'byoa_secret_name': {'key': 'byoaSecretName', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(GitHubClientSecret, self).__init__(**kwargs) + self.byoa_secret_akv_url = kwargs.get('byoa_secret_akv_url', None) + self.byoa_secret_name = kwargs.get('byoa_secret_name', None) + + class GlobalParameterSpecification(msrest.serialization.Model): """Definition of a single parameter for an entity. @@ -15298,7 +15805,7 @@ class GlobalParameterSpecification(msrest.serialization.Model): :param type: Required. Global Parameter type. Possible values include: "Object", "String", "Int", "Float", "Bool", "Array". - :type type: str or ~data_factory_management_client.models.GlobalParameterType + :type type: str or ~azure.mgmt.datafactory.models.GlobalParameterType :param value: Required. Value of parameter. :type value: object """ @@ -15333,11 +15840,11 @@ class GoogleAdWordsLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param client_customer_id: Required. The Client customer ID of the AdWords account that you @@ -15345,21 +15852,21 @@ class GoogleAdWordsLinkedService(LinkedService): :type client_customer_id: object :param developer_token: Required. The developer token associated with the manager account that you use to grant access to the AdWords API. - :type developer_token: ~data_factory_management_client.models.SecretBase + :type developer_token: ~azure.mgmt.datafactory.models.SecretBase :param authentication_type: Required. 
The OAuth 2.0 authentication mechanism used for authentication. ServiceAuthentication can only be used on self-hosted IR. Possible values include: "ServiceAuthentication", "UserAuthentication". :type authentication_type: str or - ~data_factory_management_client.models.GoogleAdWordsAuthenticationType + ~azure.mgmt.datafactory.models.GoogleAdWordsAuthenticationType :param refresh_token: The refresh token obtained from Google for authorizing access to AdWords for UserAuthentication. - :type refresh_token: ~data_factory_management_client.models.SecretBase + :type refresh_token: ~azure.mgmt.datafactory.models.SecretBase :param client_id: The client id of the google application used to acquire the refresh token. Type: string (or Expression with resultType string). :type client_id: object :param client_secret: The client secret of the google application used to acquire the refresh token. - :type client_secret: ~data_factory_management_client.models.SecretBase + :type client_secret: ~azure.mgmt.datafactory.models.SecretBase :param email: The service account email ID that is used for ServiceAuthentication and can only be used on self-hosted IR. :type email: object @@ -15444,14 +15951,14 @@ class GoogleAdWordsObjectDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The table name. Type: string (or Expression with resultType string). :type table_name: object """ @@ -15502,12 +16009,15 @@ class GoogleAdWordsSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
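With additionalColumns retyped from list[AdditionalColumns] to plain object across these sources, callers pass the raw JSON shape directly; a sketch (the column name and expression are illustrative):

    from azure.mgmt.datafactory.models import GoogleAdWordsSource

    src = GoogleAdWordsSource(
        query="SELECT CampaignId, Clicks FROM CAMPAIGN_PERFORMANCE_REPORT",
        # previously list[AdditionalColumns]; now any JSON-shaped object works:
        additional_columns=[
            {"name": "ingestDate", "value": "@trigger().startTime"},
        ],
    )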
:type query: object @@ -15523,8 +16033,9 @@ class GoogleAdWordsSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -15548,11 +16059,11 @@ class GoogleBigQueryLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param project: Required. The default BigQuery project to query against. @@ -15567,16 +16078,16 @@ class GoogleBigQueryLinkedService(LinkedService): authentication. ServiceAuthentication can only be used on self-hosted IR. Possible values include: "ServiceAuthentication", "UserAuthentication". :type authentication_type: str or - ~data_factory_management_client.models.GoogleBigQueryAuthenticationType + ~azure.mgmt.datafactory.models.GoogleBigQueryAuthenticationType :param refresh_token: The refresh token obtained from Google for authorizing access to BigQuery for UserAuthentication. - :type refresh_token: ~data_factory_management_client.models.SecretBase + :type refresh_token: ~azure.mgmt.datafactory.models.SecretBase :param client_id: The client id of the google application used to acquire the refresh token. Type: string (or Expression with resultType string). :type client_id: object :param client_secret: The client secret of the google application used to acquire the refresh token. - :type client_secret: ~data_factory_management_client.models.SecretBase + :type client_secret: ~azure.mgmt.datafactory.models.SecretBase :param email: The service account email ID that is used for ServiceAuthentication and can only be used on self-hosted IR. :type email: object @@ -15662,14 +16173,14 @@ class GoogleBigQueryObjectDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. 
If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: This property will be retired. Please consider using database + table properties instead. :type table_name: object @@ -15731,12 +16242,15 @@ class GoogleBigQuerySource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -15752,8 +16266,9 @@ class GoogleBigQuerySource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -15777,11 +16292,11 @@ class GoogleCloudStorageLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param access_key_id: The access key identifier of the Google Cloud Storage Identity and Access @@ -15789,7 +16304,7 @@ class GoogleCloudStorageLinkedService(LinkedService): :type access_key_id: object :param secret_access_key: The secret access key of the Google Cloud Storage Identity and Access Management (IAM) user. - :type secret_access_key: ~data_factory_management_client.models.SecretBase + :type secret_access_key: ~azure.mgmt.datafactory.models.SecretBase :param service_url: This value specifies the endpoint to access with the Google Cloud Storage Connector. 
This is an optional property; change it only if you want to try a different service endpoint or want to switch between https and http. Type: string (or Expression with resultType @@ -15890,6 +16405,9 @@ class GoogleCloudStorageReadSettings(StoreReadSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). :type recursive: object @@ -15930,6 +16448,7 @@ class GoogleCloudStorageReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -15971,18 +16490,18 @@ class GreenplumLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param connection_string: An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. :type connection_string: object :param pwd: The Azure key vault secret reference of password in connection string. - :type pwd: ~data_factory_management_client.models.AzureKeyVaultSecretReference + :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -16035,12 +16554,15 @@ class GreenplumSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. 
Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -16056,8 +16578,9 @@ class GreenplumSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -16089,14 +16612,14 @@ class GreenplumTableDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: This property will be retired. Please consider using schema + table properties instead. :type table_name: object @@ -16149,11 +16672,11 @@ class HBaseLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param host: Required. The IP address or host name of the HBase server. (i.e. 192.168.222.160). @@ -16166,12 +16689,11 @@ class HBaseLinkedService(LinkedService): :type http_path: object :param authentication_type: Required. The authentication mechanism to use to connect to the HBase server. Possible values include: "Anonymous", "Basic". - :type authentication_type: str or - ~data_factory_management_client.models.HBaseAuthenticationType + :type authentication_type: str or ~azure.mgmt.datafactory.models.HBaseAuthenticationType :param username: The user name used to connect to the HBase instance. 
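
A recurring edit above loosens additionalColumns on every tabular source from the typed list serializer '[AdditionalColumns]' to a bare 'object', so the property can now carry either a literal list of name/value pairs or a pipeline expression that resolves to one at run time. A hedged sketch of the two payload shapes this permits; the column names and the pipeline parameter are invented:

# Literal form: a list of AdditionalColumns-shaped dicts.
additional_columns_literal = [
    {'name': 'ingest_date', 'value': '@utcnow()'},         # hypothetical derived column
    {'name': 'source_system', 'value': 'greenplum-prod'},  # hypothetical constant column
]

# Expression form: the whole value is computed when the pipeline runs,
# which the previous '[AdditionalColumns]' serializer type could not express.
additional_columns_expression = {
    'value': "@pipeline().parameters.extraColumns",  # hypothetical pipeline parameter
    'type': 'Expression',
}
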
:type username: object :param password: The password corresponding to the user name. - :type password: ~data_factory_management_client.models.SecretBase + :type password: ~azure.mgmt.datafactory.models.SecretBase :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The default value is false. :type enable_ssl: object @@ -16255,14 +16777,14 @@ class HBaseObjectDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The table name. Type: string (or Expression with resultType string). :type table_name: object """ @@ -16313,12 +16835,15 @@ class HBaseSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -16334,8 +16859,9 @@ class HBaseSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -16359,11 +16885,11 @@ class HdfsLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. 
- :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param url: Required. The URL of the HDFS service endpoint, e.g. @@ -16380,7 +16906,7 @@ class HdfsLinkedService(LinkedService): resultType string). :type user_name: object :param password: Password for Windows authentication. - :type password: ~data_factory_management_client.models.SecretBase + :type password: ~azure.mgmt.datafactory.models.SecretBase """ _validation = { @@ -16465,6 +16991,9 @@ class HdfsReadSettings(StoreReadSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). :type recursive: object @@ -16490,7 +17019,7 @@ class HdfsReadSettings(StoreReadSettings): with resultType string). :type modified_datetime_end: object :param distcp_settings: Specifies Distcp-related settings. - :type distcp_settings: ~data_factory_management_client.models.DistcpSettings + :type distcp_settings: ~azure.mgmt.datafactory.models.DistcpSettings :param delete_files_after_completion: Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). :type delete_files_after_completion: object @@ -16504,6 +17033,7 @@ class HdfsReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -16553,11 +17083,14 @@ class HdfsSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). :type recursive: object :param distcp_settings: Specifies Distcp-related settings. 
- :type distcp_settings: ~data_factory_management_client.models.DistcpSettings + :type distcp_settings: ~azure.mgmt.datafactory.models.DistcpSettings """ _validation = { @@ -16570,6 +17103,7 @@ class HdfsSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'distcp_settings': {'key': 'distcpSettings', 'type': 'DistcpSettings'}, } @@ -16599,25 +17133,23 @@ class HdInsightHiveActivity(ExecutionActivity): :param description: Activity description. :type description: str :param depends_on: Activity depends on condition. - :type depends_on: list[~data_factory_management_client.models.ActivityDependency] + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. - :type user_properties: list[~data_factory_management_client.models.UserProperty] + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] :param linked_service_name: Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param policy: Activity policy. - :type policy: ~data_factory_management_client.models.ActivityPolicy + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy :param storage_linked_services: Storage linked service references. - :type storage_linked_services: - list[~data_factory_management_client.models.LinkedServiceReference] + :type storage_linked_services: list[~azure.mgmt.datafactory.models.LinkedServiceReference] :param arguments: User specified arguments to HDInsightActivity. :type arguments: list[object] :param get_debug_info: Debug info option. Possible values include: "None", "Always", "Failure". - :type get_debug_info: str or - ~data_factory_management_client.models.HdInsightActivityDebugInfoOption + :type get_debug_info: str or ~azure.mgmt.datafactory.models.HdInsightActivityDebugInfoOption :param script_path: Script path. Type: string (or Expression with resultType string). :type script_path: object :param script_linked_service: Script linked service reference. - :type script_linked_service: ~data_factory_management_client.models.LinkedServiceReference + :type script_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference :param defines: Allows user to specify defines for Hive job request. :type defines: dict[str, object] :param variables: User specified arguments under hivevar namespace. @@ -16678,11 +17210,11 @@ class HdInsightLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. 
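
For orientation on how the activity models above are consumed, here is a hedged sketch of the JSON fragment an HdInsightHiveActivity serializes to (wire type "HDInsightHive"); the activity name, script path, and linked-service names are all invented:

hive_activity = {
    'name': 'RunHiveScript',                # hypothetical activity name
    'type': 'HDInsightHive',
    'linkedServiceName': {'referenceName': 'HDICluster', 'type': 'LinkedServiceReference'},
    'typeProperties': {
        'scriptPath': 'scripts/clean.hql',  # hypothetical script path
        'scriptLinkedService': {'referenceName': 'BlobStorageLS', 'type': 'LinkedServiceReference'},
        'defines': {'inputDir': 'raw/'},    # Hive job defines, per the docstring above
    },
}
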
:type annotations: list[object] :param cluster_uri: Required. HDInsight cluster URI. Type: string (or Expression with @@ -16692,13 +17224,12 @@ class HdInsightLinkedService(LinkedService): string). :type user_name: object :param password: HDInsight cluster password. - :type password: ~data_factory_management_client.models.SecretBase + :type password: ~azure.mgmt.datafactory.models.SecretBase :param linked_service_name: The Azure Storage linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param hcatalog_linked_service_name: A reference to the Azure SQL linked service that points to the HCatalog database. - :type hcatalog_linked_service_name: - ~data_factory_management_client.models.LinkedServiceReference + :type hcatalog_linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -16764,27 +17295,25 @@ class HdInsightMapReduceActivity(ExecutionActivity): :param description: Activity description. :type description: str :param depends_on: Activity depends on condition. - :type depends_on: list[~data_factory_management_client.models.ActivityDependency] + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. - :type user_properties: list[~data_factory_management_client.models.UserProperty] + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] :param linked_service_name: Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param policy: Activity policy. - :type policy: ~data_factory_management_client.models.ActivityPolicy + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy :param storage_linked_services: Storage linked service references. - :type storage_linked_services: - list[~data_factory_management_client.models.LinkedServiceReference] + :type storage_linked_services: list[~azure.mgmt.datafactory.models.LinkedServiceReference] :param arguments: User specified arguments to HDInsightActivity. :type arguments: list[object] :param get_debug_info: Debug info option. Possible values include: "None", "Always", "Failure". - :type get_debug_info: str or - ~data_factory_management_client.models.HdInsightActivityDebugInfoOption + :type get_debug_info: str or ~azure.mgmt.datafactory.models.HdInsightActivityDebugInfoOption :param class_name: Required. Class name. Type: string (or Expression with resultType string). :type class_name: object :param jar_file_path: Required. Jar path. Type: string (or Expression with resultType string). :type jar_file_path: object :param jar_linked_service: Jar linked service reference. - :type jar_linked_service: ~data_factory_management_client.models.LinkedServiceReference + :type jar_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference :param jar_libs: Jar libs. :type jar_libs: list[object] :param defines: Allows user to specify defines for the MapReduce job request. @@ -16844,11 +17373,11 @@ class HdInsightOnDemandLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. 
:type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param cluster_size: Required. Number of worker/data nodes in the cluster. Suggestion value: 4. @@ -16864,7 +17393,7 @@ class HdInsightOnDemandLinkedService(LinkedService): :type version: object :param linked_service_name: Required. Azure Storage linked service to be used by the on-demand cluster for storing and processing data. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param host_subscription_id: Required. The customer’s subscription to host the cluster. Type: string (or Expression with resultType string). :type host_subscription_id: object @@ -16872,7 +17401,7 @@ class HdInsightOnDemandLinkedService(LinkedService): (or Expression with resultType string). :type service_principal_id: object :param service_principal_key: The key for the service principal id. - :type service_principal_key: ~data_factory_management_client.models.SecretBase + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :param tenant: Required. The Tenant id/name to which the service principal belongs. Type: string (or Expression with resultType string). :type tenant: object @@ -16886,21 +17415,20 @@ class HdInsightOnDemandLinkedService(LinkedService): resultType string). :type cluster_user_name: object :param cluster_password: The password to access the cluster. - :type cluster_password: ~data_factory_management_client.models.SecretBase + :type cluster_password: ~azure.mgmt.datafactory.models.SecretBase :param cluster_ssh_user_name: The username to SSH remotely connect to cluster’s node (for Linux). Type: string (or Expression with resultType string). :type cluster_ssh_user_name: object :param cluster_ssh_password: The password to SSH remotely connect cluster’s node (for Linux). - :type cluster_ssh_password: ~data_factory_management_client.models.SecretBase + :type cluster_ssh_password: ~azure.mgmt.datafactory.models.SecretBase :param additional_linked_service_names: Specifies additional storage accounts for the HDInsight linked service so that the Data Factory service can register them on your behalf. :type additional_linked_service_names: - list[~data_factory_management_client.models.LinkedServiceReference] + list[~azure.mgmt.datafactory.models.LinkedServiceReference] :param hcatalog_linked_service_name: The name of Azure SQL linked service that point to the HCatalog database. The on-demand HDInsight cluster is created by using the Azure SQL database as the metastore. - :type hcatalog_linked_service_name: - ~data_factory_management_client.models.LinkedServiceReference + :type hcatalog_linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param cluster_type: The cluster type. Type: string (or Expression with resultType string). 
:type cluster_type: object :param spark_version: The version of spark if the cluster type is 'spark'. Type: string (or @@ -16945,13 +17473,15 @@ class HdInsightOnDemandLinkedService(LinkedService): Please refer to https://docs.microsoft.com/en-us/azure/hdinsight/hdinsight-hadoop-customize- cluster-linux?toc=%2Fen-us%2Fazure%2Fhdinsight%2Fr-server%2FTOC.json&bc=%2Fen- us%2Fazure%2Fbread%2Ftoc.json#understanding-script-actions. - :type script_actions: list[~data_factory_management_client.models.ScriptAction] + :type script_actions: list[~azure.mgmt.datafactory.models.ScriptAction] :param virtual_network_id: The ARM resource ID for the vNet to which the cluster should be joined after creation. Type: string (or Expression with resultType string). :type virtual_network_id: object :param subnet_name: The ARM resource ID for the subnet in the vNet. If virtualNetworkId was specified, then this property is required. Type: string (or Expression with resultType string). :type subnet_name: object + :param credential: The credential reference containing authentication information. + :type credential: ~azure.mgmt.datafactory.models.CredentialReference """ _validation = { @@ -17005,6 +17535,7 @@ class HdInsightOnDemandLinkedService(LinkedService): 'script_actions': {'key': 'typeProperties.scriptActions', 'type': '[ScriptAction]'}, 'virtual_network_id': {'key': 'typeProperties.virtualNetworkId', 'type': 'object'}, 'subnet_name': {'key': 'typeProperties.subnetName', 'type': 'object'}, + 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'}, } def __init__( @@ -17046,6 +17577,7 @@ def __init__( self.script_actions = kwargs.get('script_actions', None) self.virtual_network_id = kwargs.get('virtual_network_id', None) self.subnet_name = kwargs.get('subnet_name', None) + self.credential = kwargs.get('credential', None) class HdInsightPigActivity(ExecutionActivity): @@ -17063,26 +17595,24 @@ class HdInsightPigActivity(ExecutionActivity): :param description: Activity description. :type description: str :param depends_on: Activity depends on condition. - :type depends_on: list[~data_factory_management_client.models.ActivityDependency] + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. - :type user_properties: list[~data_factory_management_client.models.UserProperty] + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] :param linked_service_name: Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param policy: Activity policy. - :type policy: ~data_factory_management_client.models.ActivityPolicy + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy :param storage_linked_services: Storage linked service references. - :type storage_linked_services: - list[~data_factory_management_client.models.LinkedServiceReference] + :type storage_linked_services: list[~azure.mgmt.datafactory.models.LinkedServiceReference] :param arguments: User specified arguments to HDInsightActivity. Type: array (or Expression with resultType array). :type arguments: object :param get_debug_info: Debug info option. Possible values include: "None", "Always", "Failure". 
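
HdInsightOnDemandLinkedService gains an optional credential property above, serialized under typeProperties.credential as a CredentialReference. A hedged sketch of the resulting typeProperties fragment; the wire shape of the credential follows the attribute map above, while the credential name and the other values are illustrative:

on_demand_type_properties = {
    'clusterSize': 4,
    'timeToLive': '00:05:00',
    'version': '3.6',
    'linkedServiceName': {'referenceName': 'AzureStorageLS', 'type': 'LinkedServiceReference'},
    'credential': {  # new in this change
        'referenceName': 'myUamiCredential',  # hypothetical factory credential
        'type': 'CredentialReference',
    },
}
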
- :type get_debug_info: str or - ~data_factory_management_client.models.HdInsightActivityDebugInfoOption + :type get_debug_info: str or ~azure.mgmt.datafactory.models.HdInsightActivityDebugInfoOption :param script_path: Script path. Type: string (or Expression with resultType string). :type script_path: object :param script_linked_service: Script linked service reference. - :type script_linked_service: ~data_factory_management_client.models.LinkedServiceReference + :type script_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference :param defines: Allows user to specify defines for Pig job request. :type defines: dict[str, object] """ @@ -17138,13 +17668,13 @@ class HdInsightSparkActivity(ExecutionActivity): :param description: Activity description. :type description: str :param depends_on: Activity depends on condition. - :type depends_on: list[~data_factory_management_client.models.ActivityDependency] + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. - :type user_properties: list[~data_factory_management_client.models.UserProperty] + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] :param linked_service_name: Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param policy: Activity policy. - :type policy: ~data_factory_management_client.models.ActivityPolicy + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy :param root_path: Required. The root path in 'sparkJobLinkedService' for all the job’s files. Type: string (or Expression with resultType string). :type root_path: object @@ -17154,11 +17684,10 @@ class HdInsightSparkActivity(ExecutionActivity): :param arguments: The user-specified arguments to HDInsightSparkActivity. :type arguments: list[object] :param get_debug_info: Debug info option. Possible values include: "None", "Always", "Failure". - :type get_debug_info: str or - ~data_factory_management_client.models.HdInsightActivityDebugInfoOption + :type get_debug_info: str or ~azure.mgmt.datafactory.models.HdInsightActivityDebugInfoOption :param spark_job_linked_service: The storage linked service for uploading the entry file and dependencies, and for receiving logs. - :type spark_job_linked_service: ~data_factory_management_client.models.LinkedServiceReference + :type spark_job_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference :param class_name: The application's Java/Spark main class. :type class_name: str :param proxy_user: The user to impersonate that will execute the job. Type: string (or @@ -17225,21 +17754,19 @@ class HdInsightStreamingActivity(ExecutionActivity): :param description: Activity description. :type description: str :param depends_on: Activity depends on condition. - :type depends_on: list[~data_factory_management_client.models.ActivityDependency] + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. - :type user_properties: list[~data_factory_management_client.models.UserProperty] + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] :param linked_service_name: Linked service reference. 
- :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param policy: Activity policy. - :type policy: ~data_factory_management_client.models.ActivityPolicy + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy :param storage_linked_services: Storage linked service references. - :type storage_linked_services: - list[~data_factory_management_client.models.LinkedServiceReference] + :type storage_linked_services: list[~azure.mgmt.datafactory.models.LinkedServiceReference] :param arguments: User specified arguments to HDInsightActivity. :type arguments: list[object] :param get_debug_info: Debug info option. Possible values include: "None", "Always", "Failure". - :type get_debug_info: str or - ~data_factory_management_client.models.HdInsightActivityDebugInfoOption + :type get_debug_info: str or ~azure.mgmt.datafactory.models.HdInsightActivityDebugInfoOption :param mapper: Required. Mapper executable name. Type: string (or Expression with resultType string). :type mapper: object @@ -17253,7 +17780,7 @@ class HdInsightStreamingActivity(ExecutionActivity): :param file_paths: Required. Paths to streaming job files. Can be directories. :type file_paths: list[object] :param file_linked_service: Linked service reference where the files are located. - :type file_linked_service: ~data_factory_management_client.models.LinkedServiceReference + :type file_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference :param combiner: Combiner executable name. Type: string (or Expression with resultType string). :type combiner: object :param command_environment: Command line environment values. @@ -17326,11 +17853,11 @@ class HiveLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param host: Required. IP address or host name of the Hive server, separated by ';' for @@ -17340,15 +17867,15 @@ class HiveLinkedService(LinkedService): :type port: object :param server_type: The type of Hive server. Possible values include: "HiveServer1", "HiveServer2", "HiveThriftServer". - :type server_type: str or ~data_factory_management_client.models.HiveServerType + :type server_type: str or ~azure.mgmt.datafactory.models.HiveServerType :param thrift_transport_protocol: The transport protocol to use in the Thrift layer. Possible values include: "Binary", "SASL", "HTTP ". :type thrift_transport_protocol: str or - ~data_factory_management_client.models.HiveThriftTransportProtocol + ~azure.mgmt.datafactory.models.HiveThriftTransportProtocol :param authentication_type: Required. The authentication method used to access the Hive server. Possible values include: "Anonymous", "Username", "UsernameAndPassword", "WindowsAzureHDInsightService". 
- :type authentication_type: str or ~data_factory_management_client.models.HiveAuthenticationType + :type authentication_type: str or ~azure.mgmt.datafactory.models.HiveAuthenticationType :param service_discovery_mode: true to indicate using the ZooKeeper service, false not. :type service_discovery_mode: object :param zoo_keeper_name_space: The namespace on ZooKeeper under which Hive Server 2 nodes are @@ -17361,7 +17888,7 @@ class HiveLinkedService(LinkedService): :type username: object :param password: The password corresponding to the user name that you provided in the Username field. - :type password: ~data_factory_management_client.models.SecretBase + :type password: ~azure.mgmt.datafactory.models.SecretBase :param http_path: The partial URL corresponding to the Hive server. :type http_path: object :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The @@ -17462,14 +17989,14 @@ class HiveObjectDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: This property will be retired. Please consider using schema + table properties instead. :type table_name: object @@ -17530,12 +18057,15 @@ class HiveSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
:type query: object @@ -17551,8 +18081,9 @@ class HiveSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -17584,14 +18115,14 @@ class HttpDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param relative_url: The relative URL based on the URL in the HttpLinkedService refers to an HTTP file Type: string (or Expression with resultType string). :type relative_url: object @@ -17608,9 +18139,9 @@ class HttpDataset(Dataset): string). :type additional_headers: object :param format: The format of files. - :type format: ~data_factory_management_client.models.DatasetStorageFormat + :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat :param compression: The data compression method used on files. - :type compression: ~data_factory_management_client.models.DatasetCompression + :type compression: ~azure.mgmt.datafactory.models.DatasetCompression """ _validation = { @@ -17661,11 +18192,11 @@ class HttpLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param url: Required. The base URL of the HTTP endpoint, e.g. http://www.microsoft.com. Type: @@ -17673,13 +18204,13 @@ class HttpLinkedService(LinkedService): :type url: object :param authentication_type: The authentication type to be used to connect to the HTTP server. Possible values include: "Basic", "Anonymous", "Digest", "Windows", "ClientCertificate". 
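
Alongside the doc-reference rewrite, this change threads a disableMetricsCollection knob through every StoreReadSettings, CopySource, and CopySink in the file, always typed as object so it too can be an expression. A hedged sketch of where the flag sits in a copy-activity source payload; the query itself is invented:

# Per the docstrings above, the flag defaults to false when omitted.
hive_source = {
    'type': 'HiveSource',
    'query': 'SELECT * FROM web_logs',  # hypothetical source query
    'queryTimeout': '02:00:00',
    'disableMetricsCollection': True,   # opt this source out of data store metrics
}
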
- :type authentication_type: str or ~data_factory_management_client.models.HttpAuthenticationType + :type authentication_type: str or ~azure.mgmt.datafactory.models.HttpAuthenticationType :param user_name: User name for Basic, Digest, or Windows authentication. Type: string (or Expression with resultType string). :type user_name: object :param password: Password for Basic, Digest, Windows, or ClientCertificate with EmbeddedCertData authentication. - :type password: ~data_factory_management_client.models.SecretBase + :type password: ~azure.mgmt.datafactory.models.SecretBase :param auth_headers: The additional HTTP headers in the request to RESTful API used for authorization. Type: object (or Expression with resultType object). :type auth_headers: object @@ -17755,6 +18286,9 @@ class HttpReadSettings(StoreReadSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param request_method: The HTTP method used to call the RESTful API. The default is GET. Type: string (or Expression with resultType string). :type request_method: object @@ -17782,6 +18316,7 @@ class HttpReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'request_method': {'key': 'requestMethod', 'type': 'object'}, 'request_body': {'key': 'requestBody', 'type': 'object'}, 'additional_headers': {'key': 'additionalHeaders', 'type': 'object'}, @@ -17865,6 +18400,9 @@ class HttpSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param http_request_timeout: Specifies the timeout for a HTTP client to get HTTP response from HTTP server. The default value is equivalent to System.Net.HttpWebRequest.Timeout. Type: string (or Expression with resultType string), pattern: @@ -17882,6 +18420,7 @@ class HttpSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, } @@ -17905,23 +18444,23 @@ class HubspotLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. 
:type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param client_id: Required. The client ID associated with your Hubspot application. :type client_id: object :param client_secret: The client secret associated with your Hubspot application. - :type client_secret: ~data_factory_management_client.models.SecretBase + :type client_secret: ~azure.mgmt.datafactory.models.SecretBase :param access_token: The access token obtained when initially authenticating your OAuth integration. - :type access_token: ~data_factory_management_client.models.SecretBase + :type access_token: ~azure.mgmt.datafactory.models.SecretBase :param refresh_token: The refresh token obtained when initially authenticating your OAuth integration. - :type refresh_token: ~data_factory_management_client.models.SecretBase + :type refresh_token: ~azure.mgmt.datafactory.models.SecretBase :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. :type use_encrypted_endpoints: object @@ -17995,14 +18534,14 @@ class HubspotObjectDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The table name. Type: string (or Expression with resultType string). :type table_name: object """ @@ -18053,12 +18592,15 @@ class HubspotSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
:type query: object @@ -18074,8 +18616,9 @@ class HubspotSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -18103,19 +18646,19 @@ class IfConditionActivity(Activity): :param description: Activity description. :type description: str :param depends_on: Activity depends on condition. - :type depends_on: list[~data_factory_management_client.models.ActivityDependency] + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. - :type user_properties: list[~data_factory_management_client.models.UserProperty] + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] :param expression: Required. An expression that would evaluate to Boolean. This is used to determine the block of activities (ifTrueActivities or ifFalseActivities) that will be executed. - :type expression: ~data_factory_management_client.models.Expression + :type expression: ~azure.mgmt.datafactory.models.Expression :param if_true_activities: List of activities to execute if expression is evaluated to true. This is an optional property and if not provided, the activity will exit without any action. - :type if_true_activities: list[~data_factory_management_client.models.Activity] + :type if_true_activities: list[~azure.mgmt.datafactory.models.Activity] :param if_false_activities: List of activities to execute if expression is evaluated to false. This is an optional property and if not provided, the activity will exit without any action. - :type if_false_activities: list[~data_factory_management_client.models.Activity] + :type if_false_activities: list[~azure.mgmt.datafactory.models.Activity] """ _validation = { @@ -18158,11 +18701,11 @@ class ImpalaLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param host: Required. The IP address or host name of the Impala server. (i.e. @@ -18173,13 +18716,12 @@ class ImpalaLinkedService(LinkedService): :type port: object :param authentication_type: Required. The authentication type to use. Possible values include: "Anonymous", "SASLUsername", "UsernameAndPassword". 
- :type authentication_type: str or - ~data_factory_management_client.models.ImpalaAuthenticationType + :type authentication_type: str or ~azure.mgmt.datafactory.models.ImpalaAuthenticationType :param username: The user name used to access the Impala server. The default value is anonymous when using SASLUsername. :type username: object :param password: The password corresponding to the user name when using UsernameAndPassword. - :type password: ~data_factory_management_client.models.SecretBase + :type password: ~azure.mgmt.datafactory.models.SecretBase :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The default value is false. :type enable_ssl: object @@ -18266,14 +18808,14 @@ class ImpalaObjectDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: This property will be retired. Please consider using schema + table properties instead. :type table_name: object @@ -18335,12 +18877,15 @@ class ImpalaSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
:type query: object @@ -18356,8 +18901,9 @@ class ImpalaSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -18381,11 +18927,11 @@ class InformixLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param connection_string: Required. The non-access credential portion of the connection string @@ -18398,12 +18944,12 @@ class InformixLinkedService(LinkedService): :type authentication_type: object :param credential: The access credential portion of the connection string specified in driver- specific property-value format. - :type credential: ~data_factory_management_client.models.SecretBase + :type credential: ~azure.mgmt.datafactory.models.SecretBase :param user_name: User name for Basic authentication. Type: string (or Expression with resultType string). :type user_name: object :param password: Password for Basic authentication. - :type password: ~data_factory_management_client.models.SecretBase + :type password: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -18469,6 +19015,9 @@ class InformixSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param pre_copy_script: A query to execute before starting the copy. Type: string (or Expression with resultType string). 
:type pre_copy_script: object @@ -18486,6 +19035,7 @@ class InformixSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } @@ -18517,12 +19067,15 @@ class InformixSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: Database query. Type: string (or Expression with resultType string). :type query: object """ @@ -18537,8 +19090,9 @@ class InformixSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -18570,14 +19124,14 @@ class InformixTableDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The Informix table name. Type: string (or Expression with resultType string). :type table_name: object @@ -18623,7 +19177,7 @@ class IntegrationRuntime(msrest.serialization.Model): :type additional_properties: dict[str, object] :param type: Required. Type of integration runtime.Constant filled by server. Possible values include: "Managed", "SelfHosted". 
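# `disable_metrics_collection` is added uniformly to the copy sources and
# sinks; it serializes to `disableMetricsCollection` and is typed `object` so
# both booleans and ADF expressions are accepted. A small sketch using the
# Informix sink from the hunk above (the table name is illustrative):
from azure.mgmt.datafactory.models import InformixSink

sink = InformixSink(
    pre_copy_script="DELETE FROM staging_orders",
    disable_metrics_collection=True,
)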
- :type type: str or ~data_factory_management_client.models.IntegrationRuntimeType + :type type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeType :param description: Integration runtime description. :type description: str """ @@ -18693,10 +19247,9 @@ class IntegrationRuntimeComputeProperties(msrest.serialization.Model): integration runtime. :type max_parallel_executions_per_node: int :param data_flow_properties: Data flow properties for managed integration runtime. - :type data_flow_properties: - ~data_factory_management_client.models.IntegrationRuntimeDataFlowProperties + :type data_flow_properties: ~azure.mgmt.datafactory.models.IntegrationRuntimeDataFlowProperties :param v_net_properties: VNet properties for managed integration runtime. - :type v_net_properties: ~data_factory_management_client.models.IntegrationRuntimeVNetProperties + :type v_net_properties: ~azure.mgmt.datafactory.models.IntegrationRuntimeVNetProperties """ _validation = { @@ -18793,7 +19346,7 @@ class IntegrationRuntimeCustomSetupScriptProperties(msrest.serialization.Model): script. :type blob_container_uri: str :param sas_token: The SAS token of the Azure blob container. - :type sas_token: ~data_factory_management_client.models.SecureString + :type sas_token: ~azure.mgmt.datafactory.models.SecureString """ _attribute_map = { @@ -18818,13 +19371,16 @@ class IntegrationRuntimeDataFlowProperties(msrest.serialization.Model): :type additional_properties: dict[str, object] :param compute_type: Compute type of the cluster which will execute data flow job. Possible values include: "General", "MemoryOptimized", "ComputeOptimized". - :type compute_type: str or ~data_factory_management_client.models.DataFlowComputeType + :type compute_type: str or ~azure.mgmt.datafactory.models.DataFlowComputeType :param core_count: Core count of the cluster which will execute data flow job. Supported values are: 8, 16, 32, 48, 80, 144 and 272. :type core_count: int :param time_to_live: Time to live (in minutes) setting of the cluster which will execute data flow job. :type time_to_live: int + :param cleanup: Cluster will not be recycled and it will be used in next data flow activity run + until TTL (time to live) is reached if this is set as false. Default is true. + :type cleanup: bool """ _validation = { @@ -18836,6 +19392,7 @@ class IntegrationRuntimeDataFlowProperties(msrest.serialization.Model): 'compute_type': {'key': 'computeType', 'type': 'str'}, 'core_count': {'key': 'coreCount', 'type': 'int'}, 'time_to_live': {'key': 'timeToLive', 'type': 'int'}, + 'cleanup': {'key': 'cleanup', 'type': 'bool'}, } def __init__( @@ -18847,15 +19404,16 @@ def __init__( self.compute_type = kwargs.get('compute_type', None) self.core_count = kwargs.get('core_count', None) self.time_to_live = kwargs.get('time_to_live', None) + self.cleanup = kwargs.get('cleanup', None) class IntegrationRuntimeDataProxyProperties(msrest.serialization.Model): """Data proxy properties for a managed dedicated integration runtime. :param connect_via: The self-hosted integration runtime reference. - :type connect_via: ~data_factory_management_client.models.EntityReference + :type connect_via: ~azure.mgmt.datafactory.models.EntityReference :param staging_linked_service: The staging linked service reference. - :type staging_linked_service: ~data_factory_management_client.models.EntityReference + :type staging_linked_service: ~azure.mgmt.datafactory.models.EntityReference :param path: The path to contain the staged data in the Blob storage. 
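# The new `cleanup` flag on IntegrationRuntimeDataFlowProperties decides
# whether the data flow cluster is recycled after each run; with
# cleanup=False it stays warm and is reused until the TTL expires. A sketch:
from azure.mgmt.datafactory.models import IntegrationRuntimeDataFlowProperties

data_flow_properties = IntegrationRuntimeDataFlowProperties(
    compute_type="General",
    core_count=8,
    time_to_live=15,  # minutes
    cleanup=False,    # reuse the cluster across runs until the TTL is reached
)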
:type path: str """ @@ -18884,7 +19442,7 @@ class IntegrationRuntimeDebugResource(SubResourceDebugResource): :param name: The resource name. :type name: str :param properties: Required. Integration runtime properties. - :type properties: ~data_factory_management_client.models.IntegrationRuntime + :type properties: ~azure.mgmt.datafactory.models.IntegrationRuntime """ _validation = { @@ -18910,7 +19468,7 @@ class IntegrationRuntimeListResponse(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. :param value: Required. List of integration runtimes. - :type value: list[~data_factory_management_client.models.IntegrationRuntimeResource] + :type value: list[~azure.mgmt.datafactory.models.IntegrationRuntimeResource] :param next_link: The link to the next page of results, if any remaining results exist. :type next_link: str """ @@ -18939,7 +19497,7 @@ class IntegrationRuntimeMonitoringData(msrest.serialization.Model): :param name: Integration runtime name. :type name: str :param nodes: Integration runtime node monitoring data. - :type nodes: list[~data_factory_management_client.models.IntegrationRuntimeNodeMonitoringData] + :type nodes: list[~azure.mgmt.datafactory.models.IntegrationRuntimeNodeMonitoringData] """ _attribute_map = { @@ -19047,6 +19605,93 @@ def __init__( self.received_bytes = None +class IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint(msrest.serialization.Model): + """Azure-SSIS integration runtime outbound network dependency endpoints for one category. + + :param category: The category of outbound network dependency. + :type category: str + :param endpoints: The endpoints for outbound network dependency. + :type endpoints: + list[~azure.mgmt.datafactory.models.IntegrationRuntimeOutboundNetworkDependenciesEndpoint] + """ + + _attribute_map = { + 'category': {'key': 'category', 'type': 'str'}, + 'endpoints': {'key': 'endpoints', 'type': '[IntegrationRuntimeOutboundNetworkDependenciesEndpoint]'}, + } + + def __init__( + self, + **kwargs + ): + super(IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint, self).__init__(**kwargs) + self.category = kwargs.get('category', None) + self.endpoints = kwargs.get('endpoints', None) + + +class IntegrationRuntimeOutboundNetworkDependenciesEndpoint(msrest.serialization.Model): + """The endpoint for Azure-SSIS integration runtime outbound network dependency. + + :param domain_name: The domain name of endpoint. + :type domain_name: str + :param endpoint_details: The details of endpoint. + :type endpoint_details: + list[~azure.mgmt.datafactory.models.IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails] + """ + + _attribute_map = { + 'domain_name': {'key': 'domainName', 'type': 'str'}, + 'endpoint_details': {'key': 'endpointDetails', 'type': '[IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails]'}, + } + + def __init__( + self, + **kwargs + ): + super(IntegrationRuntimeOutboundNetworkDependenciesEndpoint, self).__init__(**kwargs) + self.domain_name = kwargs.get('domain_name', None) + self.endpoint_details = kwargs.get('endpoint_details', None) + + +class IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails(msrest.serialization.Model): + """The details of Azure-SSIS integration runtime outbound network dependency endpoint. + + :param port: The port of endpoint. 
+ :type port: int + """ + + _attribute_map = { + 'port': {'key': 'port', 'type': 'int'}, + } + + def __init__( + self, + **kwargs + ): + super(IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails, self).__init__(**kwargs) + self.port = kwargs.get('port', None) + + +class IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse(msrest.serialization.Model): + """Azure-SSIS integration runtime outbound network dependency endpoints. + + :param value: The list of outbound network dependency endpoints. + :type value: + list[~azure.mgmt.datafactory.models.IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint] + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint]'}, + } + + def __init__( + self, + **kwargs + ): + super(IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse, self).__init__(**kwargs) + self.value = kwargs.get('value', None) + + class IntegrationRuntimeReference(msrest.serialization.Model): """Integration runtime reference type. @@ -19090,7 +19735,7 @@ class IntegrationRuntimeRegenerateKeyParameters(msrest.serialization.Model): :param key_name: The name of the authentication key to regenerate. Possible values include: "authKey1", "authKey2". - :type key_name: str or ~data_factory_management_client.models.IntegrationRuntimeAuthKeyName + :type key_name: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeAuthKeyName """ _attribute_map = { @@ -19121,7 +19766,7 @@ class IntegrationRuntimeResource(SubResource): :ivar etag: Etag identifies change in the resource. :vartype etag: str :param properties: Required. Integration runtime properties. - :type properties: ~data_factory_management_client.models.IntegrationRuntime + :type properties: ~azure.mgmt.datafactory.models.IntegrationRuntime """ _validation = { @@ -19160,12 +19805,12 @@ class IntegrationRuntimeSsisCatalogInfo(msrest.serialization.Model): :type catalog_admin_user_name: str :param catalog_admin_password: The password of the administrator user account of the catalog database. - :type catalog_admin_password: ~data_factory_management_client.models.SecureString + :type catalog_admin_password: ~azure.mgmt.datafactory.models.SecureString :param catalog_pricing_tier: The pricing tier for the catalog database. The valid values could be found in https://azure.microsoft.com/en-us/pricing/details/sql-database/. Possible values include: "Basic", "Standard", "Premium", "PremiumRS". :type catalog_pricing_tier: str or - ~data_factory_management_client.models.IntegrationRuntimeSsisCatalogPricingTier + ~azure.mgmt.datafactory.models.IntegrationRuntimeSsisCatalogPricingTier :param dual_standby_pair_name: The dual standby pair name of Azure-SSIS Integration Runtimes to support SSISDB failover. :type dual_standby_pair_name: str @@ -19204,27 +19849,28 @@ class IntegrationRuntimeSsisProperties(msrest.serialization.Model): collection. :type additional_properties: dict[str, object] :param catalog_info: Catalog information for managed dedicated integration runtime. - :type catalog_info: ~data_factory_management_client.models.IntegrationRuntimeSsisCatalogInfo + :type catalog_info: ~azure.mgmt.datafactory.models.IntegrationRuntimeSsisCatalogInfo :param license_type: License type for bringing your own license scenario. Possible values include: "BasePrice", "LicenseIncluded". 
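# The four new IntegrationRuntimeOutboundNetworkDependencies* models describe
# the endpoints an Azure-SSIS integration runtime calls out to. They are
# normally populated by the service, but constructing one by hand shows how
# they nest (the category, domain and port below are illustrative):
from azure.mgmt.datafactory.models import (
    IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint,
    IntegrationRuntimeOutboundNetworkDependenciesEndpoint,
    IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails,
    IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse,
)

response = IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse(
    value=[
        IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint(
            category="Azure Storage",
            endpoints=[
                IntegrationRuntimeOutboundNetworkDependenciesEndpoint(
                    domain_name="*.blob.core.windows.net",
                    endpoint_details=[
                        IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails(port=443),
                    ],
                ),
            ],
        ),
    ],
)
for category in response.value:
    for endpoint in category.endpoints:
        print(category.category, endpoint.domain_name,
              [detail.port for detail in endpoint.endpoint_details])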
- :type license_type: str or ~data_factory_management_client.models.IntegrationRuntimeLicenseType + :type license_type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeLicenseType :param custom_setup_script_properties: Custom setup script properties for a managed dedicated integration runtime. :type custom_setup_script_properties: - ~data_factory_management_client.models.IntegrationRuntimeCustomSetupScriptProperties + ~azure.mgmt.datafactory.models.IntegrationRuntimeCustomSetupScriptProperties :param data_proxy_properties: Data proxy properties for a managed dedicated integration runtime. :type data_proxy_properties: - ~data_factory_management_client.models.IntegrationRuntimeDataProxyProperties + ~azure.mgmt.datafactory.models.IntegrationRuntimeDataProxyProperties :param edition: The edition for the SSIS Integration Runtime. Possible values include: "Standard", "Enterprise". - :type edition: str or ~data_factory_management_client.models.IntegrationRuntimeEdition + :type edition: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeEdition :param express_custom_setup_properties: Custom setup without script properties for a SSIS integration runtime. - :type express_custom_setup_properties: - list[~data_factory_management_client.models.CustomSetupBase] + :type express_custom_setup_properties: list[~azure.mgmt.datafactory.models.CustomSetupBase] :param package_stores: Package stores for the SSIS Integration Runtime. - :type package_stores: list[~data_factory_management_client.models.PackageStore] + :type package_stores: list[~azure.mgmt.datafactory.models.PackageStore] + :param credential: The credential reference containing authentication information. + :type credential: ~azure.mgmt.datafactory.models.CredentialReference """ _attribute_map = { @@ -19236,6 +19882,7 @@ class IntegrationRuntimeSsisProperties(msrest.serialization.Model): 'edition': {'key': 'edition', 'type': 'str'}, 'express_custom_setup_properties': {'key': 'expressCustomSetupProperties', 'type': '[CustomSetupBase]'}, 'package_stores': {'key': 'packageStores', 'type': '[PackageStore]'}, + 'credential': {'key': 'credential', 'type': 'CredentialReference'}, } def __init__( @@ -19251,6 +19898,7 @@ def __init__( self.edition = kwargs.get('edition', None) self.express_custom_setup_properties = kwargs.get('express_custom_setup_properties', None) self.package_stores = kwargs.get('package_stores', None) + self.credential = kwargs.get('credential', None) class IntegrationRuntimeStatus(msrest.serialization.Model): @@ -19268,13 +19916,13 @@ class IntegrationRuntimeStatus(msrest.serialization.Model): :type additional_properties: dict[str, object] :param type: Required. Type of integration runtime.Constant filled by server. Possible values include: "Managed", "SelfHosted". - :type type: str or ~data_factory_management_client.models.IntegrationRuntimeType + :type type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeType :ivar data_factory_name: The data factory name which the integration runtime belong to. :vartype data_factory_name: str :ivar state: The state of integration runtime. Possible values include: "Initial", "Stopped", "Started", "Starting", "Stopping", "NeedRegistration", "Online", "Limited", "Offline", "AccessDenied". 
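# IntegrationRuntimeSsisProperties gains a `credential` reference so an
# Azure-SSIS IR can authenticate through a stored credential. A sketch;
# CredentialReference and its `reference_name` parameter are assumed from the
# broader SDK, as that class is not part of these hunks:
from azure.mgmt.datafactory.models import (
    CredentialReference,
    IntegrationRuntimeSsisProperties,
)

ssis_properties = IntegrationRuntimeSsisProperties(
    edition="Standard",
    license_type="LicenseIncluded",
    credential=CredentialReference(reference_name="myIdentityCredential"),
)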
- :vartype state: str or ~data_factory_management_client.models.IntegrationRuntimeState + :vartype state: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeState """ _validation = { @@ -19311,7 +19959,7 @@ class IntegrationRuntimeStatusListResponse(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. :param value: Required. List of integration runtime status. - :type value: list[~data_factory_management_client.models.IntegrationRuntimeStatusResponse] + :type value: list[~azure.mgmt.datafactory.models.IntegrationRuntimeStatusResponse] :param next_link: The link to the next page of results, if any remaining results exist. :type next_link: str """ @@ -19344,7 +19992,7 @@ class IntegrationRuntimeStatusResponse(msrest.serialization.Model): :ivar name: The integration runtime name. :vartype name: str :param properties: Required. Integration runtime properties. - :type properties: ~data_factory_management_client.models.IntegrationRuntimeStatus + :type properties: ~azure.mgmt.datafactory.models.IntegrationRuntimeStatus """ _validation = { @@ -19379,6 +20027,9 @@ class IntegrationRuntimeVNetProperties(msrest.serialization.Model): :param public_i_ps: Resource IDs of the public IP addresses that this integration runtime will use. :type public_i_ps: list[str] + :param subnet_id: The ID of subnet, to which this Azure-SSIS integration runtime will be + joined. + :type subnet_id: str """ _attribute_map = { @@ -19386,6 +20037,7 @@ class IntegrationRuntimeVNetProperties(msrest.serialization.Model): 'v_net_id': {'key': 'vNetId', 'type': 'str'}, 'subnet': {'key': 'subnet', 'type': 'str'}, 'public_i_ps': {'key': 'publicIPs', 'type': '[str]'}, + 'subnet_id': {'key': 'subnetId', 'type': 'str'}, } def __init__( @@ -19397,6 +20049,7 @@ def __init__( self.v_net_id = kwargs.get('v_net_id', None) self.subnet = kwargs.get('subnet', None) self.public_i_ps = kwargs.get('public_i_ps', None) + self.subnet_id = kwargs.get('subnet_id', None) class JiraLinkedService(LinkedService): @@ -19410,11 +20063,11 @@ class JiraLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param host: Required. The IP address or host name of the Jira service. (e.g. @@ -19427,7 +20080,7 @@ class JiraLinkedService(LinkedService): :type username: object :param password: The password corresponding to the user name that you provided in the username field. - :type password: ~data_factory_management_client.models.SecretBase + :type password: ~azure.mgmt.datafactory.models.SecretBase :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. :type use_encrypted_endpoints: object @@ -19502,14 +20155,14 @@ class JiraObjectDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. 
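# IntegrationRuntimeVNetProperties now also takes a full subnet resource ID
# (`subnet_id`, serialized as `subnetId`) as an alternative to the existing
# vNetId + subnet name pair. A sketch with placeholder resource names:
from azure.mgmt.datafactory.models import IntegrationRuntimeVNetProperties

vnet_properties = IntegrationRuntimeVNetProperties(
    subnet_id=(
        "/subscriptions/<sub>/resourceGroups/<rg>/providers/"
        "Microsoft.Network/virtualNetworks/<vnet>/subnets/<subnet>"
    ),
)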
:type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The table name. Type: string (or Expression with resultType string). :type table_name: object """ @@ -19560,12 +20213,15 @@ class JiraSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -19581,8 +20237,9 @@ class JiraSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -19614,16 +20271,16 @@ class JsonDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. 
- :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param location: The location of the json data storage. - :type location: ~data_factory_management_client.models.DatasetLocation + :type location: ~azure.mgmt.datafactory.models.DatasetLocation :param encoding_name: The code page name of the preferred encoding. If not specified, the default value is UTF-8, unless BOM denotes another Unicode encoding. Refer to the name column of the table in the following link to set supported values: @@ -19631,7 +20288,7 @@ class JsonDataset(Dataset): resultType string). :type encoding_name: object :param compression: The data compression method used for the json dataset. - :type compression: ~data_factory_management_client.models.DatasetCompression + :type compression: ~azure.mgmt.datafactory.models.DatasetCompression """ _validation = { @@ -19680,9 +20337,8 @@ class JsonFormat(DatasetStorageFormat): :param deserializer: Deserializer. Type: string (or Expression with resultType string). :type deserializer: object :param file_pattern: File pattern of JSON. To be more specific, the way of separating a - collection of JSON objects. The default value is 'setOfObjects'. It is case-sensitive. Possible - values include: "setOfObjects", "arrayOfObjects". - :type file_pattern: str or ~data_factory_management_client.models.JsonFormatFilePattern + collection of JSON objects. The default value is 'setOfObjects'. It is case-sensitive. + :type file_pattern: object :param nesting_separator: The character used to separate nesting levels. Default value is '.' (dot). Type: string (or Expression with resultType string). :type nesting_separator: object @@ -19712,7 +20368,7 @@ class JsonFormat(DatasetStorageFormat): 'type': {'key': 'type', 'type': 'str'}, 'serializer': {'key': 'serializer', 'type': 'object'}, 'deserializer': {'key': 'deserializer', 'type': 'object'}, - 'file_pattern': {'key': 'filePattern', 'type': 'str'}, + 'file_pattern': {'key': 'filePattern', 'type': 'object'}, 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, 'encoding_name': {'key': 'encodingName', 'type': 'object'}, 'json_node_reference': {'key': 'jsonNodeReference', 'type': 'object'}, @@ -19743,7 +20399,7 @@ class JsonReadSettings(FormatReadSettings): :param type: Required. The read setting type.Constant filled by server. :type type: str :param compression_properties: Compression settings. - :type compression_properties: ~data_factory_management_client.models.CompressionReadSettings + :type compression_properties: ~azure.mgmt.datafactory.models.CompressionReadSettings """ _validation = { @@ -19790,10 +20446,13 @@ class JsonSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param store_settings: Json store settings. - :type store_settings: ~data_factory_management_client.models.StoreWriteSettings + :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings :param format_settings: Json format settings. 
- :type format_settings: ~data_factory_management_client.models.JsonWriteSettings + :type format_settings: ~azure.mgmt.datafactory.models.JsonWriteSettings """ _validation = { @@ -19808,6 +20467,7 @@ class JsonSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, 'format_settings': {'key': 'formatSettings', 'type': 'JsonWriteSettings'}, } @@ -19841,13 +20501,16 @@ class JsonSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param store_settings: Json store settings. - :type store_settings: ~data_factory_management_client.models.StoreReadSettings + :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings :param format_settings: Json format settings. - :type format_settings: ~data_factory_management_client.models.JsonReadSettings + :type format_settings: ~azure.mgmt.datafactory.models.JsonReadSettings :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object """ _validation = { @@ -19860,9 +20523,10 @@ class JsonSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, 'format_settings': {'key': 'formatSettings', 'type': 'JsonReadSettings'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -19887,9 +20551,8 @@ class JsonWriteSettings(FormatWriteSettings): :param type: Required. The write setting type.Constant filled by server. :type type: str :param file_pattern: File pattern of JSON. This setting controls the way a collection of JSON - objects will be treated. The default value is 'setOfObjects'. It is case-sensitive. Possible - values include: "setOfObjects", "arrayOfObjects". - :type file_pattern: str or ~data_factory_management_client.models.JsonWriteFilePattern + objects will be treated. The default value is 'setOfObjects'. It is case-sensitive. 
+ :type file_pattern: object """ _validation = { @@ -19899,7 +20562,7 @@ class JsonWriteSettings(FormatWriteSettings): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'file_pattern': {'key': 'filePattern', 'type': 'str'}, + 'file_pattern': {'key': 'filePattern', 'type': 'object'}, } def __init__( @@ -20000,7 +20663,7 @@ class LinkedIntegrationRuntimeKeyAuthorization(LinkedIntegrationRuntimeType): sharing.Constant filled by server. :type authorization_type: str :param key: Required. The key used for authorization. - :type key: ~data_factory_management_client.models.SecureString + :type key: ~azure.mgmt.datafactory.models.SecureString """ _validation = { @@ -20086,7 +20749,7 @@ class LinkedServiceDebugResource(SubResourceDebugResource): :param name: The resource name. :type name: str :param properties: Required. Properties of linked service. - :type properties: ~data_factory_management_client.models.LinkedService + :type properties: ~azure.mgmt.datafactory.models.LinkedService """ _validation = { @@ -20112,7 +20775,7 @@ class LinkedServiceListResponse(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. :param value: Required. List of linked services. - :type value: list[~data_factory_management_client.models.LinkedServiceResource] + :type value: list[~azure.mgmt.datafactory.models.LinkedServiceResource] :param next_link: The link to the next page of results, if any remaining results exist. :type next_link: str """ @@ -20188,7 +20851,7 @@ class LinkedServiceResource(SubResource): :ivar etag: Etag identifies change in the resource. :vartype etag: str :param properties: Required. Properties of linked service. - :type properties: ~data_factory_management_client.models.LinkedService + :type properties: ~azure.mgmt.datafactory.models.LinkedService """ _validation = { @@ -20221,7 +20884,7 @@ class LogLocationSettings(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. :param linked_service_name: Required. Log storage linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param path: The path to storage for storing detailed logs of activity execution. Type: string (or Expression with resultType string). :type path: object @@ -20254,11 +20917,10 @@ class LogSettings(msrest.serialization.Model): (or Expression with resultType boolean). :type enable_copy_activity_log: object :param copy_activity_log_settings: Specifies settings for copy activity log. - :type copy_activity_log_settings: - ~data_factory_management_client.models.CopyActivityLogSettings + :type copy_activity_log_settings: ~azure.mgmt.datafactory.models.CopyActivityLogSettings :param log_location_settings: Required. Log location settings customer needs to provide when enabling log. - :type log_location_settings: ~data_factory_management_client.models.LogLocationSettings + :type log_location_settings: ~azure.mgmt.datafactory.models.LogLocationSettings """ _validation = { @@ -20290,7 +20952,7 @@ class LogStorageSettings(msrest.serialization.Model): collection. :type additional_properties: dict[str, object] :param linked_service_name: Required. Log storage linked service reference. 
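# With `file_pattern` loosened from an enum string to `object`,
# JsonWriteSettings accepts either a literal pattern name or an ADF
# expression object, and the result plugs into the JsonSink updated in the
# earlier hunks. A sketch; AzureBlobStorageWriteSettings is assumed from the
# broader SDK:
from azure.mgmt.datafactory.models import (
    AzureBlobStorageWriteSettings,
    JsonSink,
    JsonWriteSettings,
)

literal_settings = JsonWriteSettings(file_pattern="arrayOfObjects")
dynamic_settings = JsonWriteSettings(
    file_pattern={"value": "@pipeline().parameters.pattern", "type": "Expression"},
)

sink = JsonSink(
    store_settings=AzureBlobStorageWriteSettings(),
    format_settings=literal_settings,
    disable_metrics_collection=False,
)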
- :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param path: The path to storage for storing detailed logs of activity execution. Type: string (or Expression with resultType string). :type path: object @@ -20341,17 +21003,17 @@ class LookupActivity(ExecutionActivity): :param description: Activity description. :type description: str :param depends_on: Activity depends on condition. - :type depends_on: list[~data_factory_management_client.models.ActivityDependency] + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. - :type user_properties: list[~data_factory_management_client.models.UserProperty] + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] :param linked_service_name: Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param policy: Activity policy. - :type policy: ~data_factory_management_client.models.ActivityPolicy + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy :param source: Required. Dataset-specific source properties, same as copy activity source. - :type source: ~data_factory_management_client.models.CopySource + :type source: ~azure.mgmt.datafactory.models.CopySource :param dataset: Required. Lookup activity dataset reference. - :type dataset: ~data_factory_management_client.models.DatasetReference + :type dataset: ~azure.mgmt.datafactory.models.DatasetReference :param first_row_only: Whether to return first row or all rows. Default value is true. Type: boolean (or Expression with resultType boolean). :type first_row_only: object @@ -20400,17 +21062,17 @@ class MagentoLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param host: Required. The URL of the Magento instance. (i.e. 192.168.222.110/magento3). :type host: object :param access_token: The access token from Magento. - :type access_token: ~data_factory_management_client.models.SecretBase + :type access_token: ~azure.mgmt.datafactory.models.SecretBase :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. :type use_encrypted_endpoints: object @@ -20480,14 +21142,14 @@ class MagentoObjectDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. 
- :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The table name. Type: string (or Expression with resultType string). :type table_name: object """ @@ -20538,12 +21200,15 @@ class MagentoSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -20559,8 +21224,9 @@ class MagentoSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -20573,6 +21239,45 @@ def __init__( self.query = kwargs.get('query', None) +class ManagedIdentityCredential(Credential): + """Managed identity credential. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of credential.Constant filled by server. + :type type: str + :param description: Credential description. + :type description: str + :param annotations: List of tags that can be used for describing the Credential. + :type annotations: list[object] + :param resource_id: The resource id of user assigned managed identity. 
+ :type resource_id: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'resource_id': {'key': 'typeProperties.resourceId', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ManagedIdentityCredential, self).__init__(**kwargs) + self.type = 'ManagedIdentity' # type: str + self.resource_id = kwargs.get('resource_id', None) + + class ManagedIntegrationRuntime(IntegrationRuntime): """Managed integration runtime, including managed elastic and managed dedicated integration runtimes. @@ -20585,21 +21290,19 @@ class ManagedIntegrationRuntime(IntegrationRuntime): :type additional_properties: dict[str, object] :param type: Required. Type of integration runtime.Constant filled by server. Possible values include: "Managed", "SelfHosted". - :type type: str or ~data_factory_management_client.models.IntegrationRuntimeType + :type type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeType :param description: Integration runtime description. :type description: str :ivar state: Integration runtime state, only valid for managed dedicated integration runtime. Possible values include: "Initial", "Stopped", "Started", "Starting", "Stopping", "NeedRegistration", "Online", "Limited", "Offline", "AccessDenied". - :vartype state: str or ~data_factory_management_client.models.IntegrationRuntimeState + :vartype state: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeState :param managed_virtual_network: Managed Virtual Network reference. - :type managed_virtual_network: - ~data_factory_management_client.models.ManagedVirtualNetworkReference + :type managed_virtual_network: ~azure.mgmt.datafactory.models.ManagedVirtualNetworkReference :param compute_properties: The compute resource for managed integration runtime. - :type compute_properties: - ~data_factory_management_client.models.IntegrationRuntimeComputeProperties + :type compute_properties: ~azure.mgmt.datafactory.models.IntegrationRuntimeComputeProperties :param ssis_properties: SSIS properties for managed integration runtime. - :type ssis_properties: ~data_factory_management_client.models.IntegrationRuntimeSsisProperties + :type ssis_properties: ~azure.mgmt.datafactory.models.IntegrationRuntimeSsisProperties """ _validation = { @@ -20686,10 +21389,9 @@ class ManagedIntegrationRuntimeNode(msrest.serialization.Model): :vartype node_id: str :ivar status: The managed integration runtime node status. Possible values include: "Starting", "Available", "Recycling", "Unavailable". - :vartype status: str or - ~data_factory_management_client.models.ManagedIntegrationRuntimeNodeStatus + :vartype status: str or ~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeNodeStatus :param errors: The errors that occurred on this integration runtime node. - :type errors: list[~data_factory_management_client.models.ManagedIntegrationRuntimeError] + :type errors: list[~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeError] """ _validation = { @@ -20782,23 +21484,22 @@ class ManagedIntegrationRuntimeStatus(IntegrationRuntimeStatus): :type additional_properties: dict[str, object] :param type: Required. Type of integration runtime.Constant filled by server. Possible values include: "Managed", "SelfHosted". 
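# ManagedIdentityCredential is one of the new credential types: the
# discriminator is fixed to 'ManagedIdentity' and the user-assigned
# identity's resource ID is serialized under typeProperties.resourceId.
# A sketch with a placeholder resource ID:
from azure.mgmt.datafactory.models import ManagedIdentityCredential

credential = ManagedIdentityCredential(
    description="Identity used by the SSIS integration runtime",
    resource_id=(
        "/subscriptions/<sub>/resourceGroups/<rg>/providers/"
        "Microsoft.ManagedIdentity/userAssignedIdentities/<identity>"
    ),
)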
- :type type: str or ~data_factory_management_client.models.IntegrationRuntimeType + :type type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeType :ivar data_factory_name: The data factory name which the integration runtime belong to. :vartype data_factory_name: str :ivar state: The state of integration runtime. Possible values include: "Initial", "Stopped", "Started", "Starting", "Stopping", "NeedRegistration", "Online", "Limited", "Offline", "AccessDenied". - :vartype state: str or ~data_factory_management_client.models.IntegrationRuntimeState + :vartype state: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeState :ivar create_time: The time at which the integration runtime was created, in ISO8601 format. :vartype create_time: ~datetime.datetime :ivar nodes: The list of nodes for managed integration runtime. - :vartype nodes: list[~data_factory_management_client.models.ManagedIntegrationRuntimeNode] + :vartype nodes: list[~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeNode] :ivar other_errors: The errors that occurred on this integration runtime. - :vartype other_errors: - list[~data_factory_management_client.models.ManagedIntegrationRuntimeError] + :vartype other_errors: list[~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeError] :ivar last_operation: The last operation result that occurred on this integration runtime. :vartype last_operation: - ~data_factory_management_client.models.ManagedIntegrationRuntimeOperationResult + ~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeOperationResult """ _validation = { @@ -20843,7 +21544,7 @@ class ManagedPrivateEndpoint(msrest.serialization.Model): collection. :type additional_properties: dict[str, object] :param connection_state: The managed private endpoint connection state. - :type connection_state: ~data_factory_management_client.models.ConnectionStateProperties + :type connection_state: ~azure.mgmt.datafactory.models.ConnectionStateProperties :param fqdns: Fully qualified domain names. :type fqdns: list[str] :param group_id: The groupId to which the managed private endpoint is created. @@ -20892,7 +21593,7 @@ class ManagedPrivateEndpointListResponse(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. :param value: Required. List of managed private endpoints. - :type value: list[~data_factory_management_client.models.ManagedPrivateEndpointResource] + :type value: list[~azure.mgmt.datafactory.models.ManagedPrivateEndpointResource] :param next_link: The link to the next page of results, if any remaining results exist. :type next_link: str """ @@ -20931,7 +21632,7 @@ class ManagedPrivateEndpointResource(SubResource): :ivar etag: Etag identifies change in the resource. :vartype etag: str :param properties: Required. Managed private endpoint properties. - :type properties: ~data_factory_management_client.models.ManagedPrivateEndpoint + :type properties: ~azure.mgmt.datafactory.models.ManagedPrivateEndpoint """ _validation = { @@ -20999,7 +21700,7 @@ class ManagedVirtualNetworkListResponse(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. :param value: Required. List of managed Virtual Networks. - :type value: list[~data_factory_management_client.models.ManagedVirtualNetworkResource] + :type value: list[~azure.mgmt.datafactory.models.ManagedVirtualNetworkResource] :param next_link: The link to the next page of results, if any remaining results exist. 
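# ManagedPrivateEndpoint is the payload behind the new
# `az datafactory managed-private-endpoint` commands; `group_id` and `fqdns`
# appear in the hunk above, while `private_link_resource_id` is assumed from
# the broader SDK. Resource names below are placeholders:
from azure.mgmt.datafactory.models import ManagedPrivateEndpoint

private_endpoint = ManagedPrivateEndpoint(
    group_id="blob",
    private_link_resource_id=(
        "/subscriptions/<sub>/resourceGroups/<rg>/providers/"
        "Microsoft.Storage/storageAccounts/<account>"
    ),
    fqdns=["<account>.blob.core.windows.net"],
)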
:type next_link: str """ @@ -21072,7 +21773,7 @@ class ManagedVirtualNetworkResource(SubResource): :ivar etag: Etag identifies change in the resource. :vartype etag: str :param properties: Required. Managed Virtual Network properties. - :type properties: ~data_factory_management_client.models.ManagedVirtualNetwork + :type properties: ~azure.mgmt.datafactory.models.ManagedVirtualNetwork """ _validation = { @@ -21102,7 +21803,9 @@ def __init__( class MappingDataFlow(DataFlow): """Mapping data flow. - :param type: Type of data flow.Constant filled by server. + All required parameters must be populated in order to send to Azure. + + :param type: Required. Type of data flow.Constant filled by server. :type type: str :param description: The description of the data flow. :type description: str @@ -21110,17 +21813,21 @@ class MappingDataFlow(DataFlow): :type annotations: list[object] :param folder: The folder that this data flow is in. If not specified, Data flow will appear at the root level. - :type folder: ~data_factory_management_client.models.DataFlowFolder + :type folder: ~azure.mgmt.datafactory.models.DataFlowFolder :param sources: List of sources in data flow. - :type sources: list[~data_factory_management_client.models.DataFlowSource] + :type sources: list[~azure.mgmt.datafactory.models.DataFlowSource] :param sinks: List of sinks in data flow. - :type sinks: list[~data_factory_management_client.models.DataFlowSink] + :type sinks: list[~azure.mgmt.datafactory.models.DataFlowSink] :param transformations: List of transformations in data flow. - :type transformations: list[~data_factory_management_client.models.Transformation] + :type transformations: list[~azure.mgmt.datafactory.models.Transformation] :param script: DataFlow script. :type script: str """ + _validation = { + 'type': {'required': True}, + } + _attribute_map = { 'type': {'key': 'type', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, @@ -21155,18 +21862,18 @@ class MariaDbLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param connection_string: An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. :type connection_string: object :param pwd: The Azure key vault secret reference of password in connection string. - :type pwd: ~data_factory_management_client.models.AzureKeyVaultSecretReference + :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -21219,12 +21926,15 @@ class MariaDbSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. 
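# MappingDataFlow now declares `type` as required, matching the other
# polymorphic DataFlow types; the discriminator is still set by the subclass
# constructor, so construction is unchanged. Names and the script snippet
# below are illustrative:
from azure.mgmt.datafactory.models import (
    DataFlowSink,
    DataFlowSource,
    MappingDataFlow,
)

flow = MappingDataFlow(
    sources=[DataFlowSource(name="sourceOrders")],
    sinks=[DataFlowSink(name="sinkOrders")],
    script="source(allowSchemaDrift: true) ~> sourceOrders",
)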
Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -21240,8 +21950,9 @@ class MariaDbSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -21273,14 +21984,14 @@ class MariaDbTableDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The table name. Type: string (or Expression with resultType string). :type table_name: object """ @@ -21323,11 +22034,11 @@ class MarketoLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param endpoint: Required. The endpoint of the Marketo server. 
(i.e. 123-ABC-321.mktorest.com). @@ -21335,7 +22046,7 @@ class MarketoLinkedService(LinkedService): :param client_id: Required. The client Id of your Marketo service. :type client_id: object :param client_secret: The client secret of your Marketo service. - :type client_secret: ~data_factory_management_client.models.SecretBase + :type client_secret: ~azure.mgmt.datafactory.models.SecretBase :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. :type use_encrypted_endpoints: object @@ -21408,14 +22119,14 @@ class MarketoObjectDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The table name. Type: string (or Expression with resultType string). :type table_name: object """ @@ -21466,12 +22177,15 @@ class MarketoSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
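The two recurring changes in these source hunks meet at the call site: disable_metrics_collection is a new loosely typed flag, and additional_columns now accepts any object (for example, a list of name/value mappings or an expression) rather than a typed [AdditionalColumns] list. A hedged sketch using MarketoSource (the query and column values are illustrative):

from azure.mgmt.datafactory.models import MarketoSource

source = MarketoSource(
    query='SELECT * FROM Activity_Types',  # illustrative query
    disable_metrics_collection=True,       # new flag; the documented default is false
    additional_columns=[                   # plain objects now, not [AdditionalColumns]
        {'name': 'ingest_time', 'value': '@trigger().startTime'},
    ],
)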
:type query: object @@ -21487,8 +22201,9 @@ class MarketoSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -21501,6 +22216,29 @@ def __init__( self.query = kwargs.get('query', None) +class MetadataItem(msrest.serialization.Model): + """Specify the name and value of custom metadata item. + + :param name: Metadata item key name. Type: string (or Expression with resultType string). + :type name: object + :param value: Metadata item value. Type: string (or Expression with resultType string). + :type value: object + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'object'}, + 'value': {'key': 'value', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(MetadataItem, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + self.value = kwargs.get('value', None) + + class MicrosoftAccessLinkedService(LinkedService): """Microsoft Access linked service. @@ -21512,11 +22250,11 @@ class MicrosoftAccessLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param connection_string: Required. The non-access credential portion of the connection string @@ -21529,12 +22267,12 @@ class MicrosoftAccessLinkedService(LinkedService): :type authentication_type: object :param credential: The access credential portion of the connection string specified in driver- specific property-value format. - :type credential: ~data_factory_management_client.models.SecretBase + :type credential: ~azure.mgmt.datafactory.models.SecretBase :param user_name: User name for Basic authentication. Type: string (or Expression with resultType string). :type user_name: object :param password: Password for Basic authentication. - :type password: ~data_factory_management_client.models.SecretBase + :type password: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -21600,6 +22338,9 @@ class MicrosoftAccessSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). 
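The new MetadataItem model above is a simple name/value pair whose fields accept either literals or expressions. A minimal usage sketch (values illustrative):

from azure.mgmt.datafactory.models import MetadataItem

item = MetadataItem(
    name='department',                     # metadata key name
    value='@pipeline().parameters.dept',   # literal string or expression
)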
:type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param pre_copy_script: A query to execute before starting the copy. Type: string (or Expression with resultType string). :type pre_copy_script: object @@ -21617,6 +22358,7 @@ class MicrosoftAccessSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } @@ -21648,11 +22390,14 @@ class MicrosoftAccessSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query: Database query. Type: string (or Expression with resultType string). :type query: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object """ _validation = { @@ -21665,8 +22410,9 @@ class MicrosoftAccessSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -21698,14 +22444,14 @@ class MicrosoftAccessTableDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The Microsoft Access table name. Type: string (or Expression with resultType string). 
:type table_name: object @@ -21757,14 +22503,14 @@ class MongoDbAtlasCollectionDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param collection: Required. The collection name of the MongoDB Atlas database. Type: string (or Expression with resultType string). :type collection: object @@ -21809,11 +22555,11 @@ class MongoDbAtlasLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param connection_string: Required. The MongoDB Atlas connection string. Type: string, @@ -21852,6 +22598,65 @@ def __init__( self.database = kwargs['database'] +class MongoDbAtlasSink(CopySink): + """A copy activity MongoDB Atlas sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). 
+ :type disable_metrics_collection: object + :param write_behavior: Specifies whether the document with the same key should be overwritten (upsert) + rather than throw an exception (insert). The default value is "insert". Type: string (or + Expression with resultType string). + :type write_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(MongoDbAtlasSink, self).__init__(**kwargs) + self.type = 'MongoDbAtlasSink' # type: str + self.write_behavior = kwargs.get('write_behavior', None) + + class MongoDbAtlasSource(CopySource): """A copy activity source for a MongoDB Atlas database. @@ -21871,12 +22676,15 @@ class MongoDbAtlasSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param filter: Specifies selection filter using query operators. To return all documents in a collection, omit this parameter or pass an empty document ({}). Type: string (or Expression with resultType string). :type filter: object :param cursor_methods: Cursor methods for Mongodb query. - :type cursor_methods: ~data_factory_management_client.models.MongoDbCursorMethodsProperties + :type cursor_methods: ~azure.mgmt.datafactory.models.MongoDbCursorMethodsProperties :param batch_size: Specifies the number of documents to return in each batch of the response from MongoDB Atlas instance. In most cases, modifying the batch size will not affect the user or the application. This property's main purpose is to avoid hit the limitation of response @@ -21886,8 +22694,8 @@ pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects).
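For the new MongoDB Atlas sink, 'upsert' is the documented alternative to the default 'insert' write behavior. A hedged construction sketch (batch values are illustrative):

from azure.mgmt.datafactory.models import MongoDbAtlasSink

sink = MongoDbAtlasSink(
    write_behavior='upsert',          # overwrite documents that share a key
    write_batch_size=1000,            # illustrative; minimum 0 per the docstring
    write_batch_timeout='00:05:00',   # matches the documented timespan pattern
    disable_metrics_collection=False,
)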
+ :type additional_columns: object """ _validation = { @@ -21900,11 +22708,12 @@ class MongoDbAtlasSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'filter': {'key': 'filter', 'type': 'object'}, 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'}, 'batch_size': {'key': 'batchSize', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -21939,14 +22748,14 @@ class MongoDbCollectionDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param collection_name: Required. The table name of the MongoDB database. Type: string (or Expression with resultType string). :type collection_name: object @@ -22034,11 +22843,11 @@ class MongoDbLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param server: Required. The IP address or server name of the MongoDB server. Type: string (or @@ -22046,8 +22855,7 @@ class MongoDbLinkedService(LinkedService): :type server: object :param authentication_type: The authentication type to be used to connect to the MongoDB database. Possible values include: "Basic", "Anonymous". - :type authentication_type: str or - ~data_factory_management_client.models.MongoDbAuthenticationType + :type authentication_type: str or ~azure.mgmt.datafactory.models.MongoDbAuthenticationType :param database_name: Required. The name of the MongoDB database that you want to access. Type: string (or Expression with resultType string). :type database_name: object @@ -22055,7 +22863,7 @@ class MongoDbLinkedService(LinkedService): string). 
:type username: object :param password: Password for authentication. - :type password: ~data_factory_management_client.models.SecretBase + :type password: ~azure.mgmt.datafactory.models.SecretBase :param auth_source: Database to verify the username and password. Type: string (or Expression with resultType string). :type auth_source: object @@ -22136,12 +22944,15 @@ class MongoDbSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query: Database query. Should be a SQL-92 query expression. Type: string (or Expression with resultType string). :type query: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object """ _validation = { @@ -22154,8 +22965,9 @@ class MongoDbSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -22187,14 +22999,14 @@ class MongoDbV2CollectionDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param collection: Required. The collection name of the MongoDB database. Type: string (or Expression with resultType string). :type collection: object @@ -22239,11 +23051,11 @@ class MongoDbV2LinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. 
- :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param connection_string: Required. The MongoDB connection string. Type: string, SecureString @@ -22281,6 +23093,65 @@ def __init__( self.database = kwargs['database'] + +class MongoDbV2Sink(CopySink): + """A copy activity MongoDB sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object + :param write_behavior: Specifies whether the document with the same key should be overwritten (upsert) + rather than throw an exception (insert). The default value is "insert". Type: string (or + Expression with resultType string). + :type write_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(MongoDbV2Sink, self).__init__(**kwargs) + self.type = 'MongoDbV2Sink' # type: str + self.write_behavior = kwargs.get('write_behavior', None) + + class MongoDbV2Source(CopySource): """A copy activity source for a MongoDB database. @@ -22300,12 +23171,15 @@ class MongoDbV2Source(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer).
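MongoDbV2Sink mirrors MongoDbAtlasSink; the _attribute_map above is what maps Python attribute names to REST keys when the model is serialized. A sketch of that mapping, assuming msrest's standard Model.serialize() behavior (unset attributes are dropped from the payload):

from azure.mgmt.datafactory.models import MongoDbV2Sink

sink = MongoDbV2Sink(write_behavior='upsert')
body = sink.serialize()  # keys come from _attribute_map; None values are omitted
assert body == {'type': 'MongoDbV2Sink', 'writeBehavior': 'upsert'}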
:type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param filter: Specifies selection filter using query operators. To return all documents in a collection, omit this parameter or pass an empty document ({}). Type: string (or Expression with resultType string). :type filter: object :param cursor_methods: Cursor methods for Mongodb query. - :type cursor_methods: ~data_factory_management_client.models.MongoDbCursorMethodsProperties + :type cursor_methods: ~azure.mgmt.datafactory.models.MongoDbCursorMethodsProperties :param batch_size: Specifies the number of documents to return in each batch of the response from MongoDB instance. In most cases, modifying the batch size will not affect the user or the application. This property's main purpose is to avoid hit the limitation of response size. @@ -22315,8 +23189,8 @@ class MongoDbV2Source(CopySource): pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object """ _validation = { @@ -22329,11 +23203,12 @@ class MongoDbV2Source(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'filter': {'key': 'filter', 'type': 'object'}, 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'}, 'batch_size': {'key': 'batchSize', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -22360,17 +23235,17 @@ class MySqlLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param connection_string: Required. The connection string. :type connection_string: object :param password: The Azure key vault secret reference of password in connection string. - :type password: ~data_factory_management_client.models.AzureKeyVaultSecretReference + :type password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param encrypted_credential: The encrypted credential used for authentication. 
Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -22424,12 +23299,15 @@ class MySqlSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: Database query. Type: string (or Expression with resultType string). :type query: object """ @@ -22444,8 +23322,9 @@ class MySqlSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -22477,14 +23356,14 @@ class MySqlTableDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The MySQL table name. Type: string (or Expression with resultType string). :type table_name: object """ @@ -22527,18 +23406,18 @@ class NetezzaLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. 
- :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param connection_string: An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. :type connection_string: object :param pwd: The Azure key vault secret reference of password in connection string. - :type pwd: ~data_factory_management_client.models.AzureKeyVaultSecretReference + :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -22623,12 +23502,15 @@ class NetezzaSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -22636,7 +23518,7 @@ class NetezzaSource(TabularSource): parallel. Possible values include: "None", "DataSlice", "DynamicRange". :type partition_option: object :param partition_settings: The settings that will be leveraged for Netezza source partitioning. - :type partition_settings: ~data_factory_management_client.models.NetezzaPartitionSettings + :type partition_settings: ~azure.mgmt.datafactory.models.NetezzaPartitionSettings """ _validation = { @@ -22649,8 +23531,9 @@ class NetezzaSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'partition_option': {'key': 'partitionOption', 'type': 'object'}, 'partition_settings': {'key': 'partitionSettings', 'type': 'NetezzaPartitionSettings'}, @@ -22686,14 +23569,14 @@ class NetezzaTableDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. 
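NetezzaSource reads can be parallelized via partition_option (None, DataSlice, or DynamicRange per the docstring above), with partition_settings supplying a NetezzaPartitionSettings for ranged partitioning. A minimal sketch using the simpler slice-based option (the query text is illustrative):

from azure.mgmt.datafactory.models import NetezzaSource

source = NetezzaSource(
    query='SELECT * FROM inventory',  # illustrative query
    partition_option='DataSlice',     # one of: None, DataSlice, DynamicRange
)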
- :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: This property will be retired. Please consider using schema + table properties instead. :type table_name: object @@ -22747,11 +23630,11 @@ class ODataLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param url: Required. The URL of the OData service endpoint. Type: string (or Expression with @@ -22760,13 +23643,12 @@ class ODataLinkedService(LinkedService): :param authentication_type: Type of authentication used to connect to the OData service. Possible values include: "Basic", "Anonymous", "Windows", "AadServicePrincipal", "ManagedServiceIdentity". - :type authentication_type: str or - ~data_factory_management_client.models.ODataAuthenticationType + :type authentication_type: str or ~azure.mgmt.datafactory.models.ODataAuthenticationType :param user_name: User name of the OData service. Type: string (or Expression with resultType string). :type user_name: object :param password: Password of the OData service. - :type password: ~data_factory_management_client.models.SecretBase + :type password: ~azure.mgmt.datafactory.models.SecretBase :param auth_headers: The additional HTTP headers in the request to RESTful API used for authorization. Type: object (or Expression with resultType object). :type auth_headers: object @@ -22786,19 +23668,18 @@ class ODataLinkedService(LinkedService): :param aad_service_principal_credential_type: Specify the credential type (key or cert) is used for service principal. Possible values include: "ServicePrincipalKey", "ServicePrincipalCert". :type aad_service_principal_credential_type: str or - ~data_factory_management_client.models.ODataAadServicePrincipalCredentialType + ~azure.mgmt.datafactory.models.ODataAadServicePrincipalCredentialType :param service_principal_key: Specify the secret of your application registered in Azure Active Directory. Type: string (or Expression with resultType string). 
- :type service_principal_key: ~data_factory_management_client.models.SecretBase + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :param service_principal_embedded_cert: Specify the base64 encoded certificate of your application registered in Azure Active Directory. Type: string (or Expression with resultType string). - :type service_principal_embedded_cert: ~data_factory_management_client.models.SecretBase + :type service_principal_embedded_cert: ~azure.mgmt.datafactory.models.SecretBase :param service_principal_embedded_cert_password: Specify the password of your certificate if your certificate has a password and you are using AadServicePrincipal authentication. Type: string (or Expression with resultType string). - :type service_principal_embedded_cert_password: - ~data_factory_management_client.models.SecretBase + :type service_principal_embedded_cert_password: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -22874,14 +23755,14 @@ class ODataResourceDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param path: The OData resource path. Type: string (or Expression with resultType string). :type path: object """ @@ -22932,6 +23813,9 @@ class ODataSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query: OData query. For example, "$top=1". Type: string (or Expression with resultType string). :type query: object @@ -22941,8 +23825,8 @@ class ODataSource(CopySource): ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type http_request_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
+ :type additional_columns: object """ _validation = { @@ -22955,9 +23839,10 @@ class ODataSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -22982,11 +23867,11 @@ class OdbcLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param connection_string: Required. The non-access credential portion of the connection string @@ -22998,12 +23883,12 @@ class OdbcLinkedService(LinkedService): :type authentication_type: object :param credential: The access credential portion of the connection string specified in driver- specific property-value format. - :type credential: ~data_factory_management_client.models.SecretBase + :type credential: ~azure.mgmt.datafactory.models.SecretBase :param user_name: User name for Basic authentication. Type: string (or Expression with resultType string). :type user_name: object :param password: Password for Basic authentication. - :type password: ~data_factory_management_client.models.SecretBase + :type password: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -23069,6 +23954,9 @@ class OdbcSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param pre_copy_script: A query to execute before starting the copy. Type: string (or Expression with resultType string). 
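ODataSource takes its query as a raw OData option string, and http_request_timeout follows the same timespan pattern as the other timeouts in this file. A hedged sketch (values illustrative):

from azure.mgmt.datafactory.models import ODataSource

source = ODataSource(
    query='$top=10',                  # OData query option, as in the docstring example
    http_request_timeout='00:01:40',  # matches the documented timespan pattern
)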
:type pre_copy_script: object @@ -23086,6 +23974,7 @@ class OdbcSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } @@ -23117,12 +24006,15 @@ class OdbcSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: Database query. Type: string (or Expression with resultType string). :type query: object """ @@ -23137,8 +24029,9 @@ class OdbcSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -23170,14 +24063,14 @@ class OdbcTableDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The ODBC table name. Type: string (or Expression with resultType string). :type table_name: object """ @@ -23228,14 +24121,14 @@ class Office365Dataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. 
- :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: Required. Name of the dataset to extract from Office 365. Type: string (or Expression with resultType string). :type table_name: object @@ -23285,11 +24178,11 @@ class Office365LinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param office365_tenant_id: Required. Azure tenant ID to which the Office 365 account belongs. @@ -23302,7 +24195,7 @@ class Office365LinkedService(LinkedService): Expression with resultType string). :type service_principal_id: object :param service_principal_key: Required. Specify the application's key. - :type service_principal_key: ~data_factory_management_client.models.SecretBase + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -23363,6 +24256,9 @@ class Office365Source(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param allowed_groups: The groups containing all the users. Type: array of strings (or Expression with resultType array of strings). 
:type allowed_groups: object @@ -23394,6 +24290,7 @@ class Office365Source(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'allowed_groups': {'key': 'allowedGroups', 'type': 'object'}, 'user_scope_filter_uri': {'key': 'userScopeFilterUri', 'type': 'object'}, 'date_filter_column': {'key': 'dateFilterColumn', 'type': 'object'}, @@ -23424,10 +24321,9 @@ class Operation(msrest.serialization.Model): :param origin: The intended executor of the operation. :type origin: str :param display: Metadata associated with the operation. - :type display: ~data_factory_management_client.models.OperationDisplay + :type display: ~azure.mgmt.datafactory.models.OperationDisplay :param service_specification: Details about a service operation. - :type service_specification: - ~data_factory_management_client.models.OperationServiceSpecification + :type service_specification: ~azure.mgmt.datafactory.models.OperationServiceSpecification """ _attribute_map = { @@ -23483,7 +24379,7 @@ class OperationListResponse(msrest.serialization.Model): """A list of operations that can be performed by the Data Factory service. :param value: List of Data Factory operations supported by the Data Factory resource provider. - :type value: list[~data_factory_management_client.models.Operation] + :type value: list[~azure.mgmt.datafactory.models.Operation] :param next_link: The link to the next page of results, if any remaining results exist. :type next_link: str """ @@ -23599,9 +24495,9 @@ class OperationMetricSpecification(msrest.serialization.Model): :param source_mdm_namespace: The name of the MDM namespace. :type source_mdm_namespace: str :param availabilities: Defines how often data for metrics becomes available. - :type availabilities: list[~data_factory_management_client.models.OperationMetricAvailability] + :type availabilities: list[~azure.mgmt.datafactory.models.OperationMetricAvailability] :param dimensions: Defines the metric dimension. - :type dimensions: list[~data_factory_management_client.models.OperationMetricDimension] + :type dimensions: list[~azure.mgmt.datafactory.models.OperationMetricDimension] """ _attribute_map = { @@ -23638,11 +24534,9 @@ class OperationServiceSpecification(msrest.serialization.Model): """Details about a service operation. :param log_specifications: Details about operations related to logs. - :type log_specifications: - list[~data_factory_management_client.models.OperationLogSpecification] + :type log_specifications: list[~azure.mgmt.datafactory.models.OperationLogSpecification] :param metric_specifications: Details about operations related to metrics. - :type metric_specifications: - list[~data_factory_management_client.models.OperationMetricSpecification] + :type metric_specifications: list[~azure.mgmt.datafactory.models.OperationMetricSpecification] """ _attribute_map = { @@ -23670,11 +24564,11 @@ class OracleCloudStorageLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. 
:type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param access_key_id: The access key identifier of the Oracle Cloud Storage Identity and Access @@ -23682,7 +24576,7 @@ class OracleCloudStorageLinkedService(LinkedService): :type access_key_id: object :param secret_access_key: The secret access key of the Oracle Cloud Storage Identity and Access Management (IAM) user. - :type secret_access_key: ~data_factory_management_client.models.SecretBase + :type secret_access_key: ~azure.mgmt.datafactory.models.SecretBase :param service_url: This value specifies the endpoint to access with the Oracle Cloud Storage Connector. This is an optional property; change it only if you want to try a different service endpoint or want to switch between https and http. Type: string (or Expression with resultType @@ -23783,6 +24677,9 @@ class OracleCloudStorageReadSettings(StoreReadSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). :type recursive: object @@ -23823,6 +24720,7 @@ class OracleCloudStorageReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -23864,18 +24762,18 @@ class OracleLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param connection_string: Required. The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. :type connection_string: object :param password: The Azure key vault secret reference of password in connection string. 
- :type password: ~data_factory_management_client.models.AzureKeyVaultSecretReference + :type password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -23957,11 +24855,11 @@ class OracleServiceCloudLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param host: Required. The URL of the Oracle Service Cloud instance. @@ -23970,7 +24868,7 @@ class OracleServiceCloudLinkedService(LinkedService): :type username: object :param password: Required. The password corresponding to the user name that you provided in the username key. - :type password: ~data_factory_management_client.models.SecretBase + :type password: ~azure.mgmt.datafactory.models.SecretBase :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. Type: boolean (or Expression with resultType boolean). :type use_encrypted_endpoints: object @@ -24045,14 +24943,14 @@ class OracleServiceCloudObjectDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The table name. Type: string (or Expression with resultType string). :type table_name: object """ @@ -24103,12 +25001,15 @@ class OracleServiceCloudSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
:type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -24124,8 +25025,9 @@ class OracleServiceCloudSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -24163,6 +25065,9 @@ class OracleSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType string). :type pre_copy_script: object @@ -24180,6 +25085,7 @@ class OracleSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } @@ -24211,6 +25117,9 @@ class OracleSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param oracle_reader_query: Oracle reader query. Type: string (or Expression with resultType string). :type oracle_reader_query: object @@ -24221,10 +25130,10 @@ class OracleSource(CopySource): Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". :type partition_option: object :param partition_settings: The settings that will be leveraged for Oracle source partitioning. - :type partition_settings: ~data_factory_management_client.models.OraclePartitionSettings + :type partition_settings: ~azure.mgmt.datafactory.models.OraclePartitionSettings :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
+ :type additional_columns: object """ _validation = { @@ -24237,11 +25146,12 @@ class OracleSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'oracle_reader_query': {'key': 'oracleReaderQuery', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'partition_option': {'key': 'partitionOption', 'type': 'object'}, 'partition_settings': {'key': 'partitionSettings', 'type': 'OraclePartitionSettings'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -24276,14 +25186,14 @@ class OracleTableDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: This property will be retired. Please consider using schema + table properties instead. :type table_name: object @@ -24345,18 +25255,19 @@ class OrcDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param location: The location of the ORC data storage. - :type location: ~data_factory_management_client.models.DatasetLocation - :param orc_compression_codec: Possible values include: "none", "zlib", "snappy", "lzo". - :type orc_compression_codec: str or ~data_factory_management_client.models.OrcCompressionCodec + :type location: ~azure.mgmt.datafactory.models.DatasetLocation + :param orc_compression_codec: The data orcCompressionCodec. Type: string (or Expression with + resultType string). 
+ :type orc_compression_codec: object """ _validation = { @@ -24375,7 +25286,7 @@ class OrcDataset(Dataset): 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, - 'orc_compression_codec': {'key': 'typeProperties.orcCompressionCodec', 'type': 'str'}, + 'orc_compression_codec': {'key': 'typeProperties.orcCompressionCodec', 'type': 'object'}, } def __init__( @@ -24448,10 +25359,13 @@ class OrcSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param store_settings: ORC store settings. - :type store_settings: ~data_factory_management_client.models.StoreWriteSettings + :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings :param format_settings: ORC format settings. - :type format_settings: ~data_factory_management_client.models.OrcWriteSettings + :type format_settings: ~azure.mgmt.datafactory.models.OrcWriteSettings """ _validation = { @@ -24466,6 +25380,7 @@ class OrcSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, 'format_settings': {'key': 'formatSettings', 'type': 'OrcWriteSettings'}, } @@ -24499,11 +25414,14 @@ class OrcSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param store_settings: ORC store settings. - :type store_settings: ~data_factory_management_client.models.StoreReadSettings + :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
+ :type additional_columns: object """ _validation = { @@ -24516,8 +25434,9 @@ class OrcSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -24578,7 +25497,7 @@ class PackageStore(msrest.serialization.Model): :param name: Required. The name of the package store. :type name: str :param package_store_linked_service: Required. The package store linked service reference. - :type package_store_linked_service: ~data_factory_management_client.models.EntityReference + :type package_store_linked_service: ~azure.mgmt.datafactory.models.EntityReference """ _validation = { @@ -24607,7 +25526,7 @@ class ParameterSpecification(msrest.serialization.Model): :param type: Required. Parameter type. Possible values include: "Object", "String", "Int", "Float", "Bool", "Array", "SecureString". - :type type: str or ~data_factory_management_client.models.ParameterType + :type type: str or ~azure.mgmt.datafactory.models.ParameterType :param default_value: Default value of parameter. :type default_value: object """ @@ -24649,19 +25568,19 @@ class ParquetDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param location: The location of the parquet storage. - :type location: ~data_factory_management_client.models.DatasetLocation - :param compression_codec: Possible values include: "none", "gzip", "snappy", "lzo", "bzip2", - "deflate", "zipDeflate", "lz4", "tar", "tarGZip". - :type compression_codec: str or ~data_factory_management_client.models.CompressionCodec + :type location: ~azure.mgmt.datafactory.models.DatasetLocation + :param compression_codec: The data compressionCodec. Type: string (or Expression with + resultType string). 
+ :type compression_codec: object """ _validation = { @@ -24680,7 +25599,7 @@ class ParquetDataset(Dataset): 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, - 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'str'}, + 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'object'}, } def __init__( @@ -24753,10 +25672,13 @@ class ParquetSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param store_settings: Parquet store settings. - :type store_settings: ~data_factory_management_client.models.StoreWriteSettings + :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings :param format_settings: Parquet format settings. - :type format_settings: ~data_factory_management_client.models.ParquetWriteSettings + :type format_settings: ~azure.mgmt.datafactory.models.ParquetWriteSettings """ _validation = { @@ -24771,6 +25693,7 @@ class ParquetSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, 'format_settings': {'key': 'formatSettings', 'type': 'ParquetWriteSettings'}, } @@ -24804,11 +25727,14 @@ class ParquetSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param store_settings: Parquet store settings. - :type store_settings: ~data_factory_management_client.models.StoreReadSettings + :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
+ :type additional_columns: object """ _validation = { @@ -24821,8 +25747,9 @@ class ParquetSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -24886,11 +25813,11 @@ class PaypalLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param host: Required. The URL of the PayPal instance. (i.e. api.sandbox.paypal.com). @@ -24898,7 +25825,7 @@ class PaypalLinkedService(LinkedService): :param client_id: Required. The client ID associated with your PayPal application. :type client_id: object :param client_secret: The client secret associated with your PayPal application. - :type client_secret: ~data_factory_management_client.models.SecretBase + :type client_secret: ~azure.mgmt.datafactory.models.SecretBase :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. :type use_encrypted_endpoints: object @@ -24971,14 +25898,14 @@ class PaypalObjectDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The table name. Type: string (or Expression with resultType string). :type table_name: object """ @@ -25029,12 +25956,15 @@ class PaypalSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. 
Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -25050,8 +25980,9 @@ class PaypalSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -25075,11 +26006,11 @@ class PhoenixLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param host: Required. The IP address or host name of the Phoenix server. (i.e. @@ -25095,12 +26026,11 @@ class PhoenixLinkedService(LinkedService): :param authentication_type: Required. The authentication mechanism used to connect to the Phoenix server. Possible values include: "Anonymous", "UsernameAndPassword", "WindowsAzureHDInsightService". - :type authentication_type: str or - ~data_factory_management_client.models.PhoenixAuthenticationType + :type authentication_type: str or ~azure.mgmt.datafactory.models.PhoenixAuthenticationType :param username: The user name used to connect to the Phoenix server. :type username: object :param password: The password corresponding to the user name. - :type password: ~data_factory_management_client.models.SecretBase + :type password: ~azure.mgmt.datafactory.models.SecretBase :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The default value is false. :type enable_ssl: object @@ -25189,14 +26119,14 @@ class PhoenixObjectDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. 
- :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: This property will be retired. Please consider using schema + table properties instead. :type table_name: object @@ -25258,12 +26188,15 @@ class PhoenixSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -25279,8 +26212,9 @@ class PhoenixSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -25337,7 +26271,7 @@ class PipelineListResponse(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. :param value: Required. List of pipelines. - :type value: list[~data_factory_management_client.models.PipelineResource] + :type value: list[~azure.mgmt.datafactory.models.PipelineResource] :param next_link: The link to the next page of results, if any remaining results exist. :type next_link: str """ @@ -25364,8 +26298,7 @@ class PipelinePolicy(msrest.serialization.Model): """Pipeline Policy. :param elapsed_time_metric: Pipeline ElapsedTime Metric Policy. 
- :type elapsed_time_metric: - ~data_factory_management_client.models.PipelineElapsedTimeMetricPolicy + :type elapsed_time_metric: ~azure.mgmt.datafactory.models.PipelineElapsedTimeMetricPolicy """ _attribute_map = { @@ -25436,11 +26369,11 @@ class PipelineResource(SubResource): :param description: The description of the pipeline. :type description: str :param activities: List of activities in pipeline. - :type activities: list[~data_factory_management_client.models.Activity] + :type activities: list[~azure.mgmt.datafactory.models.Activity] :param parameters: List of parameters for pipeline. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param variables: List of variables for pipeline. - :type variables: dict[str, ~data_factory_management_client.models.VariableSpecification] + :type variables: dict[str, ~azure.mgmt.datafactory.models.VariableSpecification] :param concurrency: The max number of concurrent runs for the pipeline. :type concurrency: int :param annotations: List of tags that can be used for describing the Pipeline. @@ -25449,9 +26382,9 @@ class PipelineResource(SubResource): :type run_dimensions: dict[str, object] :param folder: The folder that this Pipeline is in. If not specified, Pipeline will appear at the root level. - :type folder: ~data_factory_management_client.models.PipelineFolder + :type folder: ~azure.mgmt.datafactory.models.PipelineFolder :param policy: Pipeline Policy. - :type policy: ~data_factory_management_client.models.PipelinePolicy + :type policy: ~azure.mgmt.datafactory.models.PipelinePolicy """ _validation = { @@ -25518,7 +26451,7 @@ class PipelineRun(msrest.serialization.Model): :ivar run_dimensions: Run dimensions emitted by Pipeline run. :vartype run_dimensions: dict[str, str] :ivar invoked_by: Entity that started the pipeline run. - :vartype invoked_by: ~data_factory_management_client.models.PipelineRunInvokedBy + :vartype invoked_by: ~azure.mgmt.datafactory.models.PipelineRunInvokedBy :ivar last_updated: The last updated timestamp for the pipeline run event in ISO8601 format. :vartype last_updated: ~datetime.datetime :ivar run_start: The start time of a pipeline run in ISO8601 format. @@ -25598,18 +26531,26 @@ class PipelineRunInvokedBy(msrest.serialization.Model): :vartype id: str :ivar invoked_by_type: The type of the entity that started the run. :vartype invoked_by_type: str + :ivar pipeline_name: The name of the pipeline that triggered the run, if any. + :vartype pipeline_name: str + :ivar pipeline_run_id: The run id of the pipeline that triggered the run, if any. 
+ :vartype pipeline_run_id: str """ _validation = { 'name': {'readonly': True}, 'id': {'readonly': True}, 'invoked_by_type': {'readonly': True}, + 'pipeline_name': {'readonly': True}, + 'pipeline_run_id': {'readonly': True}, } _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'id': {'key': 'id', 'type': 'str'}, 'invoked_by_type': {'key': 'invokedByType', 'type': 'str'}, + 'pipeline_name': {'key': 'pipelineName', 'type': 'str'}, + 'pipeline_run_id': {'key': 'pipelineRunId', 'type': 'str'}, } def __init__( @@ -25620,6 +26561,8 @@ def __init__( self.name = None self.id = None self.invoked_by_type = None + self.pipeline_name = None + self.pipeline_run_id = None class PipelineRunsQueryResponse(msrest.serialization.Model): @@ -25628,7 +26571,7 @@ class PipelineRunsQueryResponse(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. :param value: Required. List of pipeline runs. - :type value: list[~data_factory_management_client.models.PipelineRun] + :type value: list[~azure.mgmt.datafactory.models.PipelineRun] :param continuation_token: The continuation token for getting the next page of results, if any remaining results exist, null otherwise. :type continuation_token: str @@ -25659,7 +26602,7 @@ class PolybaseSettings(msrest.serialization.Model): collection. :type additional_properties: dict[str, object] :param reject_type: Reject type. Possible values include: "value", "percentage". - :type reject_type: str or ~data_factory_management_client.models.PolybaseSettingsRejectType + :type reject_type: str or ~azure.mgmt.datafactory.models.PolybaseSettingsRejectType :param reject_value: Specifies the value or the percentage of rows that can be rejected before the query fails. Type: number (or Expression with resultType number), minimum: 0. :type reject_value: object @@ -25704,17 +26647,17 @@ class PostgreSqlLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param connection_string: Required. The connection string. :type connection_string: object :param password: The Azure key vault secret reference of password in connection string. - :type password: ~data_factory_management_client.models.AzureKeyVaultSecretReference + :type password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -25768,12 +26711,15 @@ class PostgreSqlSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. 
Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: Database query. Type: string (or Expression with resultType string). :type query: object """ @@ -25788,8 +26734,9 @@ class PostgreSqlSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -25821,14 +26768,14 @@ class PostgreSqlTableDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: This property will be retired. Please consider using schema + table properties instead. :type table_name: object @@ -25881,11 +26828,11 @@ class PrestoLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param host: Required. The IP address or host name of the Presto server. (i.e. @@ -25900,12 +26847,11 @@ class PrestoLinkedService(LinkedService): :type port: object :param authentication_type: Required. 
The authentication mechanism used to connect to the Presto server. Possible values include: "Anonymous", "LDAP". - :type authentication_type: str or - ~data_factory_management_client.models.PrestoAuthenticationType + :type authentication_type: str or ~azure.mgmt.datafactory.models.PrestoAuthenticationType :param username: The user name used to connect to the Presto server. :type username: object :param password: The password corresponding to the user name. - :type password: ~data_factory_management_client.models.SecretBase + :type password: ~azure.mgmt.datafactory.models.SecretBase :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The default value is false. :type enable_ssl: object @@ -26003,14 +26949,14 @@ class PrestoObjectDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: This property will be retired. Please consider using schema + table properties instead. :type table_name: object @@ -26072,12 +27018,15 @@ class PrestoSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
:type query: object @@ -26093,8 +27042,9 @@ class PrestoSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -26113,7 +27063,7 @@ class PrivateEndpointConnectionListResponse(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. :param value: Required. List of Private Endpoint Connections. - :type value: list[~data_factory_management_client.models.PrivateEndpointConnectionResource] + :type value: list[~azure.mgmt.datafactory.models.PrivateEndpointConnectionResource] :param next_link: The link to the next page of results, if any remaining results exist. :type next_link: str """ @@ -26150,7 +27100,7 @@ class PrivateEndpointConnectionResource(SubResource): :ivar etag: Etag identifies change in the resource. :vartype etag: str :param properties: Core resource properties. - :type properties: ~data_factory_management_client.models.RemotePrivateEndpointConnection + :type properties: ~azure.mgmt.datafactory.models.RemotePrivateEndpointConnection """ _validation = { @@ -26181,7 +27131,7 @@ class PrivateLinkConnectionApprovalRequest(msrest.serialization.Model): :param private_link_service_connection_state: The state of a private link connection. :type private_link_service_connection_state: - ~data_factory_management_client.models.PrivateLinkConnectionState + ~azure.mgmt.datafactory.models.PrivateLinkConnectionState """ _attribute_map = { @@ -26210,7 +27160,7 @@ class PrivateLinkConnectionApprovalRequestResource(SubResource): :ivar etag: Etag identifies change in the resource. :vartype etag: str :param properties: Core resource properties. - :type properties: ~data_factory_management_client.models.PrivateLinkConnectionApprovalRequest + :type properties: ~azure.mgmt.datafactory.models.PrivateLinkConnectionApprovalRequest """ _validation = { @@ -26277,7 +27227,7 @@ class PrivateLinkResource(SubResource): :ivar etag: Etag identifies change in the resource. :vartype etag: str :param properties: Core resource properties. - :type properties: ~data_factory_management_client.models.PrivateLinkResourceProperties + :type properties: ~azure.mgmt.datafactory.models.PrivateLinkResourceProperties """ _validation = { @@ -26344,7 +27294,7 @@ class PrivateLinkResourcesWrapper(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. :param value: Required. - :type value: list[~data_factory_management_client.models.PrivateLinkResource] + :type value: list[~azure.mgmt.datafactory.models.PrivateLinkResource] """ _validation = { @@ -26367,7 +27317,7 @@ class QueryDataFlowDebugSessionsResponse(msrest.serialization.Model): """A list of active debug sessions. :param value: Array with all active debug sessions. - :type value: list[~data_factory_management_client.models.DataFlowDebugSessionInfo] + :type value: list[~azure.mgmt.datafactory.models.DataFlowDebugSessionInfo] :param next_link: The link to the next page of results, if any remaining results exist. 
:type next_link: str """ @@ -26397,11 +27347,11 @@ class QuickBooksLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param connection_properties: Properties used to connect to QuickBooks. It is mutually @@ -26414,11 +27364,11 @@ class QuickBooksLinkedService(LinkedService): :param consumer_key: The consumer key for OAuth 1.0 authentication. :type consumer_key: object :param consumer_secret: The consumer secret for OAuth 1.0 authentication. - :type consumer_secret: ~data_factory_management_client.models.SecretBase + :type consumer_secret: ~azure.mgmt.datafactory.models.SecretBase :param access_token: The access token for OAuth 1.0 authentication. - :type access_token: ~data_factory_management_client.models.SecretBase + :type access_token: ~azure.mgmt.datafactory.models.SecretBase :param access_token_secret: The access token secret for OAuth 1.0 authentication. - :type access_token_secret: ~data_factory_management_client.models.SecretBase + :type access_token_secret: ~azure.mgmt.datafactory.models.SecretBase :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. :type use_encrypted_endpoints: object @@ -26486,14 +27436,14 @@ class QuickBooksObjectDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The table name. Type: string (or Expression with resultType string). :type table_name: object """ @@ -26544,12 +27494,15 @@ class QuickBooksSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. 
Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -26565,8 +27518,9 @@ class QuickBooksSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -26590,12 +27544,11 @@ class RecurrenceSchedule(msrest.serialization.Model): :param hours: The hours. :type hours: list[int] :param week_days: The days of the week. - :type week_days: list[str or ~data_factory_management_client.models.DaysOfWeek] + :type week_days: list[str or ~azure.mgmt.datafactory.models.DaysOfWeek] :param month_days: The month days. :type month_days: list[int] :param monthly_occurrences: The monthly occurrences. - :type monthly_occurrences: - list[~data_factory_management_client.models.RecurrenceScheduleOccurrence] + :type monthly_occurrences: list[~azure.mgmt.datafactory.models.RecurrenceScheduleOccurrence] """ _attribute_map = { @@ -26628,7 +27581,7 @@ class RecurrenceScheduleOccurrence(msrest.serialization.Model): :type additional_properties: dict[str, object] :param day: The day of the week. Possible values include: "Sunday", "Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday". - :type day: str or ~data_factory_management_client.models.DayOfWeek + :type day: str or ~azure.mgmt.datafactory.models.DayOfWeek :param occurrence: The occurrence. :type occurrence: int """ @@ -26694,7 +27647,7 @@ class RedshiftUnloadSettings(msrest.serialization.Model): :param s3_linked_service_name: Required. The name of the Amazon S3 linked service which will be used for the unload operation when copying from the Amazon Redshift source. - :type s3_linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type s3_linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param bucket_name: Required. The bucket of the interim Amazon S3 which will be used to store the unloaded data from Amazon Redshift source. The bucket must be in the same region as the Amazon Redshift source. Type: string (or Expression with resultType string). @@ -26739,11 +27692,14 @@ class RelationalSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). 
+ :type disable_metrics_collection: object :param query: Database query. Type: string (or Expression with resultType string). :type query: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object """ _validation = { @@ -26756,8 +27712,9 @@ class RelationalSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -26789,14 +27746,14 @@ class RelationalTableDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The relational table name. Type: string (or Expression with resultType string). :type table_name: object @@ -26837,10 +27794,10 @@ class RemotePrivateEndpointConnection(msrest.serialization.Model): :ivar provisioning_state: :vartype provisioning_state: str :param private_endpoint: PrivateEndpoint of a remote private endpoint connection. - :type private_endpoint: ~data_factory_management_client.models.ArmIdWrapper + :type private_endpoint: ~azure.mgmt.datafactory.models.ArmIdWrapper :param private_link_service_connection_state: The state of a private link connection. :type private_link_service_connection_state: - ~data_factory_management_client.models.PrivateLinkConnectionState + ~azure.mgmt.datafactory.models.PrivateLinkConnectionState """ _validation = { @@ -26879,7 +27836,7 @@ class RerunTumblingWindowTrigger(Trigger): :type description: str :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". - :vartype runtime_state: str or ~data_factory_management_client.models.TriggerRuntimeState + :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState :param annotations: List of tags that can be used for describing the trigger. :type annotations: list[object] :param parent_trigger: Required. The parent trigger reference. 
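The hunks above repeat two changes across every copy source: `additionalColumns` loosens from the typed list `'[AdditionalColumns]'` to a plain `'object'`, and a new `disableMetricsCollection` flag is added. Below is a minimal, dependency-free sketch of what the loosened type permits in the REST payload; the column name and pipeline parameter are hypothetical, not part of this change:

import json

# Literal form: a list of AdditionalColumns-shaped objects ({'name', 'value'}).
literal_columns = [
    {"name": "ingested_at", "value": "@utcnow()"},  # hypothetical column
]

# Expression form: resolves to such a list at run time; representable only
# now that the serialized type is 'object' rather than '[AdditionalColumns]'.
expression_columns = {
    "type": "Expression",
    "value": "@pipeline().parameters.extraColumns",  # hypothetical parameter
}

for value in (literal_columns, expression_columns):
    source = {
        "type": "RelationalSource",
        "additionalColumns": value,         # both shapes are valid JSON here
        "disableMetricsCollection": False,  # new opt-out flag from the hunks above
    }
    print(json.dumps(source, indent=2))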
@@ -26939,11 +27896,11 @@ class ResponsysLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param endpoint: Required. The endpoint of the Responsys server. @@ -26953,7 +27910,7 @@ class ResponsysLinkedService(LinkedService): :type client_id: object :param client_secret: The client secret associated with the Responsys application. Type: string (or Expression with resultType string). - :type client_secret: ~data_factory_management_client.models.SecretBase + :type client_secret: ~azure.mgmt.datafactory.models.SecretBase :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. Type: boolean (or Expression with resultType boolean). :type use_encrypted_endpoints: object @@ -27027,14 +27984,14 @@ class ResponsysObjectDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The table name. Type: string (or Expression with resultType string). :type table_name: object """ @@ -27085,12 +28042,15 @@ class ResponsysSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
+ :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -27106,8 +28066,9 @@ class ResponsysSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -27139,14 +28100,14 @@ class RestResourceDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param relative_url: The relative URL to the resource that the RESTful API provides. Type: string (or Expression with resultType string). :type relative_url: object @@ -27210,11 +28171,11 @@ class RestServiceLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param url: Required. The base URL of the REST service. @@ -27226,12 +28187,11 @@ class RestServiceLinkedService(LinkedService): :param authentication_type: Required. Type of authentication used to connect to the REST service. Possible values include: "Anonymous", "Basic", "AadServicePrincipal", "ManagedServiceIdentity". - :type authentication_type: str or - ~data_factory_management_client.models.RestServiceAuthenticationType + :type authentication_type: str or ~azure.mgmt.datafactory.models.RestServiceAuthenticationType :param user_name: The user name used in Basic authentication type. :type user_name: object :param password: The password used in Basic authentication type. 
- :type password: ~data_factory_management_client.models.SecretBase + :type password: ~azure.mgmt.datafactory.models.SecretBase :param auth_headers: The additional HTTP headers in the request to RESTful API used for authorization. Type: object (or Expression with resultType object). :type auth_headers: object @@ -27240,7 +28200,7 @@ class RestServiceLinkedService(LinkedService): :type service_principal_id: object :param service_principal_key: The application's key used in AadServicePrincipal authentication type. - :type service_principal_key: ~data_factory_management_client.models.SecretBase + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :param tenant: The tenant information (domain name or tenant ID) used in AadServicePrincipal authentication type under which your application resides. :type tenant: object @@ -27254,6 +28214,8 @@ class RestServiceLinkedService(LinkedService): encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). :type encrypted_credential: object + :param credential: The credential reference containing authentication information. + :type credential: ~azure.mgmt.datafactory.models.CredentialReference """ _validation = { @@ -27281,6 +28243,7 @@ class RestServiceLinkedService(LinkedService): 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, 'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'}, } def __init__( @@ -27301,6 +28264,7 @@ def __init__( self.azure_cloud_type = kwargs.get('azure_cloud_type', None) self.aad_resource_id = kwargs.get('aad_resource_id', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.credential = kwargs.get('credential', None) class RestSink(CopySink): @@ -27328,6 +28292,9 @@ class RestSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param request_method: The HTTP method used to call the RESTful API. The default is POST. Type: string (or Expression with resultType string). :type request_method: object @@ -27358,6 +28325,7 @@ class RestSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'request_method': {'key': 'requestMethod', 'type': 'object'}, 'additional_headers': {'key': 'additionalHeaders', 'type': 'object'}, 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, @@ -27397,6 +28365,9 @@ class RestSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). 
+ :type disable_metrics_collection: object :param request_method: The HTTP method used to call the RESTful API. The default is GET. Type: string (or Expression with resultType string). :type request_method: object @@ -27417,8 +28388,8 @@ class RestSource(CopySource): :param request_interval: The time to await before sending next page request. :type request_interval: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object """ _validation = { @@ -27431,13 +28402,14 @@ class RestSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'request_method': {'key': 'requestMethod', 'type': 'object'}, 'request_body': {'key': 'requestBody', 'type': 'object'}, 'additional_headers': {'key': 'additionalHeaders', 'type': 'object'}, 'pagination_rules': {'key': 'paginationRules', 'type': 'object'}, 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, 'request_interval': {'key': 'requestInterval', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -27498,9 +28470,9 @@ class RunFilterParameters(msrest.serialization.Model): 'ISO 8601' format. :type last_updated_before: ~datetime.datetime :param filters: List of filters. - :type filters: list[~data_factory_management_client.models.RunQueryFilter] + :type filters: list[~azure.mgmt.datafactory.models.RunQueryFilter] :param order_by: List of OrderBy option. - :type order_by: list[~data_factory_management_client.models.RunQueryOrderBy] + :type order_by: list[~azure.mgmt.datafactory.models.RunQueryOrderBy] """ _validation = { @@ -27539,10 +28511,10 @@ class RunQueryFilter(msrest.serialization.Model): runs are TriggerName, TriggerRunTimestamp and Status. Possible values include: "PipelineName", "Status", "RunStart", "RunEnd", "ActivityName", "ActivityRunStart", "ActivityRunEnd", "ActivityType", "TriggerName", "TriggerRunTimestamp", "RunGroupId", "LatestOnly". - :type operand: str or ~data_factory_management_client.models.RunQueryFilterOperand + :type operand: str or ~azure.mgmt.datafactory.models.RunQueryFilterOperand :param operator: Required. Operator to be used for filter. Possible values include: "Equals", "NotEquals", "In", "NotIn". - :type operator: str or ~data_factory_management_client.models.RunQueryFilterOperator + :type operator: str or ~azure.mgmt.datafactory.models.RunQueryFilterOperator :param values: Required. List of filter values. :type values: list[str] """ @@ -27580,9 +28552,9 @@ class RunQueryOrderBy(msrest.serialization.Model): TriggerRunTimestamp and Status. Possible values include: "RunStart", "RunEnd", "PipelineName", "Status", "ActivityName", "ActivityRunStart", "ActivityRunEnd", "TriggerName", "TriggerRunTimestamp". 
- :type order_by: str or ~data_factory_management_client.models.RunQueryOrderByField + :type order_by: str or ~azure.mgmt.datafactory.models.RunQueryOrderByField :param order: Required. Sorting order of the parameter. Possible values include: "ASC", "DESC". - :type order: str or ~data_factory_management_client.models.RunQueryOrder + :type order: str or ~azure.mgmt.datafactory.models.RunQueryOrder """ _validation = { @@ -27615,11 +28587,11 @@ class SalesforceLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param environment_url: The URL of Salesforce instance. Default is @@ -27631,9 +28603,9 @@ class SalesforceLinkedService(LinkedService): (or Expression with resultType string). :type username: object :param password: The password for Basic authentication of the Salesforce instance. - :type password: ~data_factory_management_client.models.SecretBase + :type password: ~azure.mgmt.datafactory.models.SecretBase :param security_token: The security token is optional to remotely access Salesforce instance. - :type security_token: ~data_factory_management_client.models.SecretBase + :type security_token: ~azure.mgmt.datafactory.models.SecretBase :param api_version: The Salesforce API version used in ADF. Type: string (or Expression with resultType string). :type api_version: object @@ -27687,11 +28659,11 @@ class SalesforceMarketingCloudLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param connection_properties: Properties used to connect to Salesforce Marketing Cloud. It is @@ -27702,7 +28674,7 @@ class SalesforceMarketingCloudLinkedService(LinkedService): :type client_id: object :param client_secret: The client secret associated with the Salesforce Marketing Cloud application. Type: string (or Expression with resultType string). - :type client_secret: ~data_factory_management_client.models.SecretBase + :type client_secret: ~azure.mgmt.datafactory.models.SecretBase :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. Type: boolean (or Expression with resultType boolean). 
:type use_encrypted_endpoints: object @@ -27774,14 +28746,14 @@ class SalesforceMarketingCloudObjectDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The table name. Type: string (or Expression with resultType string). :type table_name: object """ @@ -27832,12 +28804,15 @@ class SalesforceMarketingCloudSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -27853,8 +28828,9 @@ class SalesforceMarketingCloudSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -27886,14 +28862,14 @@ class SalesforceObjectDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. 
- :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param object_api_name: The Salesforce object API name. Type: string (or Expression with resultType string). :type object_api_name: object @@ -27937,11 +28913,11 @@ class SalesforceServiceCloudLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param environment_url: The URL of Salesforce Service Cloud instance. Default is @@ -27953,9 +28929,9 @@ class SalesforceServiceCloudLinkedService(LinkedService): (or Expression with resultType string). :type username: object :param password: The password for Basic authentication of the Salesforce instance. - :type password: ~data_factory_management_client.models.SecretBase + :type password: ~azure.mgmt.datafactory.models.SecretBase :param security_token: The security token is optional to remotely access Salesforce instance. - :type security_token: ~data_factory_management_client.models.SecretBase + :type security_token: ~azure.mgmt.datafactory.models.SecretBase :param api_version: The Salesforce API version used in ADF. Type: string (or Expression with resultType string). :type api_version: object @@ -28022,14 +28998,14 @@ class SalesforceServiceCloudObjectDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param object_api_name: The Salesforce Service Cloud object API name. Type: string (or Expression with resultType string). 
:type object_api_name: object @@ -28087,9 +29063,12 @@ class SalesforceServiceCloudSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param write_behavior: The write behavior for the operation. Default is Insert. Possible values include: "Insert", "Upsert". - :type write_behavior: str or ~data_factory_management_client.models.SalesforceSinkWriteBehavior + :type write_behavior: str or ~azure.mgmt.datafactory.models.SalesforceSinkWriteBehavior :param external_id_field_name: The name of the external ID field for upsert operation. Default value is 'Id' column. Type: string (or Expression with resultType string). :type external_id_field_name: object @@ -28114,6 +29093,7 @@ class SalesforceServiceCloudSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, 'external_id_field_name': {'key': 'externalIdFieldName', 'type': 'object'}, 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, @@ -28149,14 +29129,17 @@ class SalesforceServiceCloudSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query: Database query. Type: string (or Expression with resultType string). :type query: object :param read_behavior: The read behavior for the operation. Default is Query. Possible values include: "Query", "QueryAll". - :type read_behavior: str or ~data_factory_management_client.models.SalesforceSourceReadBehavior + :type read_behavior: str or ~azure.mgmt.datafactory.models.SalesforceSourceReadBehavior :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
+ :type additional_columns: object """ _validation = { @@ -28169,9 +29152,10 @@ class SalesforceServiceCloudSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'read_behavior': {'key': 'readBehavior', 'type': 'str'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -28210,9 +29194,12 @@ class SalesforceSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param write_behavior: The write behavior for the operation. Default is Insert. Possible values include: "Insert", "Upsert". - :type write_behavior: str or ~data_factory_management_client.models.SalesforceSinkWriteBehavior + :type write_behavior: str or ~azure.mgmt.datafactory.models.SalesforceSinkWriteBehavior :param external_id_field_name: The name of the external ID field for upsert operation. Default value is 'Id' column. Type: string (or Expression with resultType string). :type external_id_field_name: object @@ -28237,6 +29224,7 @@ class SalesforceSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, 'external_id_field_name': {'key': 'externalIdFieldName', 'type': 'object'}, 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, @@ -28272,17 +29260,20 @@ class SalesforceSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: Database query. Type: string (or Expression with resultType string). :type query: object :param read_behavior: The read behavior for the operation. Default is Query. Possible values include: "Query", "QueryAll". 
- :type read_behavior: str or ~data_factory_management_client.models.SalesforceSourceReadBehavior + :type read_behavior: str or ~azure.mgmt.datafactory.models.SalesforceSourceReadBehavior """ _validation = { @@ -28295,8 +29286,9 @@ class SalesforceSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'read_behavior': {'key': 'readBehavior', 'type': 'str'}, } @@ -28330,14 +29322,14 @@ class SapBwCubeDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder """ _validation = { @@ -28376,11 +29368,11 @@ class SapBwLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param server: Required. Host name of the SAP BW instance. Type: string (or Expression with @@ -28396,7 +29388,7 @@ class SapBwLinkedService(LinkedService): resultType string). :type user_name: object :param password: Password to access the SAP BW server. - :type password: ~data_factory_management_client.models.SecretBase + :type password: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -28458,12 +29450,15 @@ class SapBwSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). 
:type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: MDX query. Type: string (or Expression with resultType string). :type query: object """ @@ -28478,8 +29473,9 @@ class SapBwSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -28503,11 +29499,11 @@ class SapCloudForCustomerLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param url: Required. The URL of SAP Cloud for Customer OData API. For example, @@ -28518,7 +29514,7 @@ class SapCloudForCustomerLinkedService(LinkedService): resultType string). :type username: object :param password: The password for Basic authentication. - :type password: ~data_factory_management_client.models.SecretBase + :type password: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Either encryptedCredential or username/password must be provided. Type: string (or Expression with resultType string). @@ -28574,14 +29570,14 @@ class SapCloudForCustomerResourceDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. 
- :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param path: Required. The path of the SAP Cloud for Customer OData entity. Type: string (or Expression with resultType string). :type path: object @@ -28640,10 +29636,13 @@ class SapCloudForCustomerSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param write_behavior: The write behavior for the operation. Default is 'Insert'. Possible values include: "Insert", "Update". :type write_behavior: str or - ~data_factory_management_client.models.SapCloudForCustomerSinkWriteBehavior + ~azure.mgmt.datafactory.models.SapCloudForCustomerSinkWriteBehavior :param http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:05:00. Type: string (or Expression with resultType string), pattern: @@ -28663,6 +29662,7 @@ class SapCloudForCustomerSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, } @@ -28696,12 +29696,15 @@ class SapCloudForCustomerSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: SAP Cloud for Customer OData query. For example, "$top=1". Type: string (or Expression with resultType string). 
:type query: object @@ -28722,8 +29725,9 @@ class SapCloudForCustomerSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, } @@ -28749,11 +29753,11 @@ class SapEccLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param url: Required. The URL of SAP ECC OData API. For example, @@ -28764,7 +29768,7 @@ class SapEccLinkedService(LinkedService): resultType string). :type username: str :param password: The password for Basic authentication. - :type password: ~data_factory_management_client.models.SecretBase + :type password: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Either encryptedCredential or username/password must be provided. Type: string (or Expression with resultType string). @@ -28820,14 +29824,14 @@ class SapEccResourceDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param path: Required. The path of the SAP ECC OData entity. Type: string (or Expression with resultType string). :type path: object @@ -28880,12 +29884,15 @@ class SapEccSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). 
:type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: SAP ECC OData query. For example, "$top=1". Type: string (or Expression with resultType string). :type query: object @@ -28906,8 +29913,9 @@ class SapEccSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, } @@ -28933,11 +29941,11 @@ class SapHanaLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param connection_string: SAP HANA ODBC connection string. Type: string, SecureString or @@ -28948,13 +29956,12 @@ class SapHanaLinkedService(LinkedService): :type server: object :param authentication_type: The authentication type to be used to connect to the SAP HANA server. Possible values include: "Basic", "Windows". - :type authentication_type: str or - ~data_factory_management_client.models.SapHanaAuthenticationType + :type authentication_type: str or ~azure.mgmt.datafactory.models.SapHanaAuthenticationType :param user_name: Username to access the SAP HANA server. Type: string (or Expression with resultType string). :type user_name: object :param password: Password to access the SAP HANA server. - :type password: ~data_factory_management_client.models.SecretBase + :type password: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
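Every source and sink touched in this section gains the same serialization entry, 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}. A minimal sketch of how msrest consumes such an attribute map, assuming only that the msrest package is installed; ToySource is a stand-in, not a class from this SDK:

from msrest.serialization import Model


class ToySource(Model):
    """Stand-in mirroring the attribute-map pattern of the generated sources."""

    _attribute_map = {
        'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
        'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
    }

    def __init__(self, **kwargs):
        super(ToySource, self).__init__(**kwargs)
        self.query_timeout = kwargs.get('query_timeout', None)
        self.disable_metrics_collection = kwargs.get('disable_metrics_collection', None)


# The Python attribute is snake_case; 'key' supplies the camelCase REST name,
# and type 'object' passes the value through untyped, which is why booleans
# and Expression objects are both accepted.
print(ToySource(disable_metrics_collection=True).serialize())
# -> {'disableMetricsCollection': True}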
@@ -29033,12 +30040,15 @@ class SapHanaSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: SAP HANA Sql query. Type: string (or Expression with resultType string). :type query: object :param packet_size: The packet size of data read from SAP HANA. Type: integer(or Expression @@ -29049,7 +30059,7 @@ class SapHanaSource(TabularSource): :type partition_option: object :param partition_settings: The settings that will be leveraged for SAP HANA source partitioning. - :type partition_settings: ~data_factory_management_client.models.SapHanaPartitionSettings + :type partition_settings: ~azure.mgmt.datafactory.models.SapHanaPartitionSettings """ _validation = { @@ -29062,8 +30072,9 @@ class SapHanaSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'packet_size': {'key': 'packetSize', 'type': 'object'}, 'partition_option': {'key': 'partitionOption', 'type': 'object'}, @@ -29101,14 +30112,14 @@ class SapHanaTableDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param schema_type_properties_schema: The schema name of SAP HANA. Type: string (or Expression with resultType string). 
:type schema_type_properties_schema: object @@ -29156,11 +30167,11 @@ class SapOpenHubLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param server: Host name of the SAP BW instance where the open hub destination is located. @@ -29185,7 +30196,7 @@ class SapOpenHubLinkedService(LinkedService): :type user_name: object :param password: Password to access the SAP BW server where the open hub destination is located. - :type password: ~data_factory_management_client.models.SecretBase + :type password: ~azure.mgmt.datafactory.models.SecretBase :param message_server: The hostname of the SAP Message Server. Type: string (or Expression with resultType string). :type message_server: object @@ -29263,12 +30274,15 @@ class SapOpenHubSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param exclude_last_request: Whether to exclude the records of the last request. The default value is true. Type: boolean (or Expression with resultType boolean). 
:type exclude_last_request: object @@ -29295,8 +30309,9 @@ class SapOpenHubSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'exclude_last_request': {'key': 'excludeLastRequest', 'type': 'object'}, 'base_request_id': {'key': 'baseRequestId', 'type': 'object'}, 'custom_rfc_read_table_function_module': {'key': 'customRfcReadTableFunctionModule', 'type': 'object'}, @@ -29334,14 +30349,14 @@ class SapOpenHubTableDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param open_hub_destination_name: Required. The name of the Open Hub Destination with destination type as Database Table. Type: string (or Expression with resultType string). :type open_hub_destination_name: object @@ -29397,11 +30412,11 @@ class SapTableLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param server: Host name of the SAP instance where the table is located. Type: string (or @@ -29425,7 +30440,7 @@ class SapTableLinkedService(LinkedService): (or Expression with resultType string). :type user_name: object :param password: Password to access the SAP server where the table is located. - :type password: ~data_factory_management_client.models.SecretBase + :type password: ~azure.mgmt.datafactory.models.SecretBase :param message_server: The hostname of the SAP Message Server. Type: string (or Expression with resultType string). :type message_server: object @@ -29565,14 +30580,14 @@ class SapTableResourceDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. 
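For illustration (not part of this diff): the SapOpenHubSource keys above map directly onto constructor keywords. A sketch under the same import assumption; the request id is a made-up example value:

from azure.mgmt.datafactory.models import SapOpenHubSource

source = SapOpenHubSource(
    exclude_last_request=True,  # drop records of the last (possibly open) request
    base_request_id=40,         # hypothetical: only read delta data after request 40
    disable_metrics_collection=True,
)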
:type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: Required. The name of the SAP Table. Type: string (or Expression with resultType string). :type table_name: object @@ -29625,12 +30640,15 @@ class SapTableSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param row_count: The number of rows to be retrieved. Type: integer(or Expression with resultType integer). :type row_count: object @@ -29659,7 +30677,7 @@ class SapTableSource(TabularSource): :type partition_option: object :param partition_settings: The settings that will be leveraged for SAP table source partitioning. - :type partition_settings: ~data_factory_management_client.models.SapTablePartitionSettings + :type partition_settings: ~azure.mgmt.datafactory.models.SapTablePartitionSettings """ _validation = { @@ -29672,8 +30690,9 @@ class SapTableSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'row_count': {'key': 'rowCount', 'type': 'object'}, 'row_skips': {'key': 'rowSkips', 'type': 'object'}, 'rfc_table_fields': {'key': 'rfcTableFields', 'type': 'object'}, @@ -29718,13 +30737,13 @@ class ScheduleTrigger(MultiplePipelineTrigger): :type description: str :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". 
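The SapTableSource docstring above pairs partition_option with SapTablePartitionSettings. A hedged sketch of a date-partitioned read; the field names on SapTablePartitionSettings are assumed from that model (not shown in this hunk), and the enum value and bounds are hypothetical:

from azure.mgmt.datafactory.models import SapTablePartitionSettings, SapTableSource

source = SapTableSource(
    row_count=10000,                             # cap on retrieved rows
    partition_option="PartitionOnCalendarDate",  # assumed enum value
    partition_settings=SapTablePartitionSettings(
        partition_column_name="BUDAT",           # hypothetical posting-date column
        partition_lower_bound="20210101",
        partition_upper_bound="20211231",
        max_partitions_number="4",
    ),
)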
- :vartype runtime_state: str or ~data_factory_management_client.models.TriggerRuntimeState + :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState :param annotations: List of tags that can be used for describing the trigger. :type annotations: list[object] :param pipelines: Pipelines that need to be started. - :type pipelines: list[~data_factory_management_client.models.TriggerPipelineReference] + :type pipelines: list[~azure.mgmt.datafactory.models.TriggerPipelineReference] :param recurrence: Required. Recurrence schedule configuration. - :type recurrence: ~data_factory_management_client.models.ScheduleTriggerRecurrence + :type recurrence: ~azure.mgmt.datafactory.models.ScheduleTriggerRecurrence """ _validation = { @@ -29760,7 +30779,7 @@ class ScheduleTriggerRecurrence(msrest.serialization.Model): :type additional_properties: dict[str, object] :param frequency: The frequency. Possible values include: "NotSpecified", "Minute", "Hour", "Day", "Week", "Month", "Year". - :type frequency: str or ~data_factory_management_client.models.RecurrenceFrequency + :type frequency: str or ~azure.mgmt.datafactory.models.RecurrenceFrequency :param interval: The interval. :type interval: int :param start_time: The start time. @@ -29770,7 +30789,7 @@ class ScheduleTriggerRecurrence(msrest.serialization.Model): :param time_zone: The time zone. :type time_zone: str :param schedule: The recurrence schedule. - :type schedule: ~data_factory_management_client.models.RecurrenceSchedule + :type schedule: ~azure.mgmt.datafactory.models.RecurrenceSchedule """ _attribute_map = { @@ -29806,9 +30825,8 @@ class ScriptAction(msrest.serialization.Model): :type name: str :param uri: Required. The URI for the script action. :type uri: str - :param roles: Required. The node types on which the script action should be executed. Possible - values include: "Headnode", "Workernode", "Zookeeper". - :type roles: str or ~data_factory_management_client.models.HdiNodeTypes + :param roles: Required. The node types on which the script action should be executed. + :type roles: str :param parameters: The parameters for the script action. :type parameters: str """ @@ -29914,11 +30932,11 @@ class SelfHostedIntegrationRuntime(IntegrationRuntime): :type additional_properties: dict[str, object] :param type: Required. Type of integration runtime.Constant filled by server. Possible values include: "Managed", "SelfHosted". - :type type: str or ~data_factory_management_client.models.IntegrationRuntimeType + :type type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeType :param description: Integration runtime description. :type description: str :param linked_info: The base definition of a linked integration runtime. - :type linked_info: ~data_factory_management_client.models.LinkedIntegrationRuntimeType + :type linked_info: ~azure.mgmt.datafactory.models.LinkedIntegrationRuntimeType """ _validation = { @@ -29958,8 +30976,7 @@ class SelfHostedIntegrationRuntimeNode(msrest.serialization.Model): :ivar status: Status of the integration runtime node. Possible values include: "NeedRegistration", "Online", "Limited", "Offline", "Upgrading", "Initializing", "InitializeFailed". - :vartype status: str or - ~data_factory_management_client.models.SelfHostedIntegrationRuntimeNodeStatus + :vartype status: str or ~azure.mgmt.datafactory.models.SelfHostedIntegrationRuntimeNodeStatus :ivar capabilities: The integration runtime capabilities dictionary. 
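The ScheduleTrigger/ScheduleTriggerRecurrence pair above is plain configuration. A minimal sketch; the pipeline name is hypothetical and the reference type strings are passed explicitly for clarity:

import datetime

from azure.mgmt.datafactory.models import (
    PipelineReference,
    ScheduleTrigger,
    ScheduleTriggerRecurrence,
    TriggerPipelineReference,
)

trigger = ScheduleTrigger(
    recurrence=ScheduleTriggerRecurrence(
        frequency="Hour",
        interval=4,  # fire every 4 hours
        start_time=datetime.datetime(2021, 9, 1),
        time_zone="UTC",
    ),
    pipelines=[TriggerPipelineReference(
        pipeline_reference=PipelineReference(
            type="PipelineReference", reference_name="examplePipeline"),
    )],
)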
:vartype capabilities: dict[str, str] :ivar version_status: Status of the integration runtime node version. @@ -29981,7 +30998,7 @@ class SelfHostedIntegrationRuntimeNode(msrest.serialization.Model): :ivar last_update_result: The result of the last integration runtime node update. Possible values include: "None", "Succeed", "Fail". :vartype last_update_result: str or - ~data_factory_management_client.models.IntegrationRuntimeUpdateResult + ~azure.mgmt.datafactory.models.IntegrationRuntimeUpdateResult :ivar last_start_update_time: The last time for the integration runtime node update start. :vartype last_start_update_time: ~datetime.datetime :ivar last_end_update_time: The last time for the integration runtime node update end. @@ -30076,13 +31093,13 @@ class SelfHostedIntegrationRuntimeStatus(IntegrationRuntimeStatus): :type additional_properties: dict[str, object] :param type: Required. Type of integration runtime.Constant filled by server. Possible values include: "Managed", "SelfHosted". - :type type: str or ~data_factory_management_client.models.IntegrationRuntimeType + :type type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeType :ivar data_factory_name: The data factory name which the integration runtime belong to. :vartype data_factory_name: str :ivar state: The state of integration runtime. Possible values include: "Initial", "Stopped", "Started", "Starting", "Stopping", "NeedRegistration", "Online", "Limited", "Offline", "AccessDenied". - :vartype state: str or ~data_factory_management_client.models.IntegrationRuntimeState + :vartype state: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeState :ivar create_time: The time at which the integration runtime was created, in ISO8601 format. :vartype create_time: ~datetime.datetime :ivar task_queue_id: The task queue id of the integration runtime. @@ -30091,11 +31108,11 @@ class SelfHostedIntegrationRuntimeStatus(IntegrationRuntimeStatus): communication channel (when more than 2 self-hosted integration runtime nodes exist). Possible values include: "NotSet", "SslEncrypted", "NotEncrypted". :vartype internal_channel_encryption: str or - ~data_factory_management_client.models.IntegrationRuntimeInternalChannelEncryptionMode + ~azure.mgmt.datafactory.models.IntegrationRuntimeInternalChannelEncryptionMode :ivar version: Version of the integration runtime. :vartype version: str :param nodes: The list of nodes for this integration runtime. - :type nodes: list[~data_factory_management_client.models.SelfHostedIntegrationRuntimeNode] + :type nodes: list[~azure.mgmt.datafactory.models.SelfHostedIntegrationRuntimeNode] :ivar scheduled_update_date: The date at which the integration runtime will be scheduled to update, in ISO8601 format. :vartype scheduled_update_date: ~datetime.datetime @@ -30110,13 +31127,12 @@ class SelfHostedIntegrationRuntimeStatus(IntegrationRuntimeStatus): :vartype service_urls: list[str] :ivar auto_update: Whether Self-hosted integration runtime auto update has been turned on. Possible values include: "On", "Off". - :vartype auto_update: str or - ~data_factory_management_client.models.IntegrationRuntimeAutoUpdate + :vartype auto_update: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeAutoUpdate :ivar version_status: Status of the integration runtime version. :vartype version_status: str :param links: The list of linked integration runtimes that are created to share with this integration runtime. 
- :type links: list[~data_factory_management_client.models.LinkedIntegrationRuntime] + :type links: list[~azure.mgmt.datafactory.models.LinkedIntegrationRuntime] :ivar pushed_version: The version that the integration runtime is going to update to. :vartype pushed_version: str :ivar latest_version: The latest version on download center. @@ -30204,11 +31220,11 @@ class ServiceNowLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param endpoint: Required. The endpoint of the ServiceNow server. (i.e. @@ -30216,18 +31232,17 @@ class ServiceNowLinkedService(LinkedService): :type endpoint: object :param authentication_type: Required. The authentication type to use. Possible values include: "Basic", "OAuth2". - :type authentication_type: str or - ~data_factory_management_client.models.ServiceNowAuthenticationType + :type authentication_type: str or ~azure.mgmt.datafactory.models.ServiceNowAuthenticationType :param username: The user name used to connect to the ServiceNow server for Basic and OAuth2 authentication. :type username: object :param password: The password corresponding to the user name for Basic and OAuth2 authentication. - :type password: ~data_factory_management_client.models.SecretBase + :type password: ~azure.mgmt.datafactory.models.SecretBase :param client_id: The client id for OAuth2 authentication. :type client_id: object :param client_secret: The client secret for OAuth2 authentication. - :type client_secret: ~data_factory_management_client.models.SecretBase + :type client_secret: ~azure.mgmt.datafactory.models.SecretBase :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. :type use_encrypted_endpoints: object @@ -30306,14 +31321,14 @@ class ServiceNowObjectDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The table name. Type: string (or Expression with resultType string). 
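For illustration: a Basic-auth ServiceNowLinkedService built from the fields documented above. SecureString is assumed here as the inline SecretBase implementation from the same models package (a Key Vault reference is preferable in practice), and the endpoint and user values are placeholders:

from azure.mgmt.datafactory.models import SecureString, ServiceNowLinkedService

linked_service = ServiceNowLinkedService(
    endpoint="https://example.service-now.com",  # hypothetical instance URL
    authentication_type="Basic",
    username="integration_user",
    password=SecureString(value="<placeholder-secret>"),
)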
:type table_name: object """ @@ -30364,12 +31379,15 @@ class ServiceNowSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -30385,8 +31403,9 @@ class ServiceNowSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -30399,6 +31418,53 @@ def __init__( self.query = kwargs.get('query', None) +class ServicePrincipalCredential(Credential): + """Service principal credential. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of credential.Constant filled by server. + :type type: str + :param description: Credential description. + :type description: str + :param annotations: List of tags that can be used for describing the Credential. + :type annotations: list[object] + :param service_principal_id: The app ID of the service principal used to authenticate. + :type service_principal_id: object + :param service_principal_key: The key of the service principal used to authenticate. + :type service_principal_key: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param tenant: The ID of the tenant to which the service principal belongs. 
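The new ServicePrincipalCredential model introduced above (its serialization map follows below) stores the key as an AzureKeyVaultSecretReference. A sketch with placeholder identifiers; the reference type string is passed explicitly:

from azure.mgmt.datafactory.models import (
    AzureKeyVaultSecretReference,
    LinkedServiceReference,
    ServicePrincipalCredential,
)

credential = ServicePrincipalCredential(
    description="Example credential for copy activities",
    service_principal_id="<app-id-guid>",  # placeholder
    service_principal_key=AzureKeyVaultSecretReference(
        store=LinkedServiceReference(
            type="LinkedServiceReference", reference_name="exampleKeyVaultLS"),
        secret_name="sp-key",              # hypothetical secret name
    ),
    tenant="<tenant-guid>",                # placeholder
)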
+ :type tenant: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'AzureKeyVaultSecretReference'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(ServicePrincipalCredential, self).__init__(**kwargs) + self.type = 'ServicePrincipal' # type: str + self.service_principal_id = kwargs.get('service_principal_id', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + self.tenant = kwargs.get('tenant', None) + + class SetVariableActivity(Activity): """Set value for a Variable. @@ -30414,9 +31480,9 @@ class SetVariableActivity(Activity): :param description: Activity description. :type description: str :param depends_on: Activity depends on condition. - :type depends_on: list[~data_factory_management_client.models.ActivityDependency] + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. - :type user_properties: list[~data_factory_management_client.models.UserProperty] + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] :param variable_name: Name of the variable whose value needs to be set. :type variable_name: str :param value: Value to be set. Could be a static value or Expression. @@ -30499,6 +31565,9 @@ class SftpReadSettings(StoreReadSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). :type recursive: object @@ -30536,6 +31605,7 @@ class SftpReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -30575,11 +31645,11 @@ class SftpServerLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. 
- :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param host: Required. The SFTP server host name. Type: string (or Expression with resultType @@ -30590,12 +31660,12 @@ class SftpServerLinkedService(LinkedService): :type port: object :param authentication_type: The authentication type to be used to connect to the FTP server. Possible values include: "Basic", "SshPublicKey", "MultiFactor". - :type authentication_type: str or ~data_factory_management_client.models.SftpAuthenticationType + :type authentication_type: str or ~azure.mgmt.datafactory.models.SftpAuthenticationType :param user_name: The username used to log on to the SFTP server. Type: string (or Expression with resultType string). :type user_name: object :param password: Password to logon the SFTP server for Basic authentication. - :type password: ~data_factory_management_client.models.SecretBase + :type password: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -30608,10 +31678,10 @@ class SftpServerLinkedService(LinkedService): :param private_key_content: Base64 encoded SSH private key content for SshPublicKey authentication. For on-premises copy with SshPublicKey authentication, either PrivateKeyPath or PrivateKeyContent should be specified. SSH private key should be OpenSSH format. - :type private_key_content: ~data_factory_management_client.models.SecretBase + :type private_key_content: ~azure.mgmt.datafactory.models.SecretBase :param pass_phrase: The password to decrypt the SSH private key if the SSH private key is encrypted. - :type pass_phrase: ~data_factory_management_client.models.SecretBase + :type pass_phrase: ~azure.mgmt.datafactory.models.SecretBase :param skip_host_key_validation: If true, skip the SSH host key validation. Default value is false. Type: boolean (or Expression with resultType boolean). :type skip_host_key_validation: object @@ -30678,6 +31748,9 @@ class SftpWriteSettings(StoreWriteSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param copy_behavior: The type of copy behavior for copy sink. :type copy_behavior: object :param operation_timeout: Specifies the timeout for writing each chunk to SFTP server. 
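For illustration: the SftpReadSettings keys above (recursive, wildcardFolderPath, wildcardFileName, and the new disableMetricsCollection) map onto constructor keywords. The paths are hypothetical:

from azure.mgmt.datafactory.models import SftpReadSettings

read_settings = SftpReadSettings(
    recursive=True,                     # walk sub-folders
    wildcard_folder_path="incoming/*",  # hypothetical folder filter
    wildcard_file_name="*.csv",
    disable_metrics_collection=False,
)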
Default @@ -30697,6 +31770,7 @@ class SftpWriteSettings(StoreWriteSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, 'operation_timeout': {'key': 'operationTimeout', 'type': 'object'}, 'use_temp_file_rename': {'key': 'useTempFileRename', 'type': 'object'}, @@ -30723,11 +31797,11 @@ class SharePointOnlineListLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param site_url: Required. The URL of the SharePoint Online site. For example, @@ -30744,7 +31818,7 @@ class SharePointOnlineListLinkedService(LinkedService): :type service_principal_id: object :param service_principal_key: Required. The client secret of your application registered in Azure Active Directory. Type: string (or Expression with resultType string). - :type service_principal_key: ~data_factory_management_client.models.SecretBase + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -30805,14 +31879,14 @@ class SharePointOnlineListResourceDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param list_name: The name of the SharePoint Online list. Type: string (or Expression with resultType string). :type list_name: object @@ -30864,6 +31938,9 @@ class SharePointOnlineListSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. 
Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query: The OData query to filter the data in SharePoint Online list. For example, "$top=1". Type: string (or Expression with resultType string). :type query: object @@ -30883,6 +31960,7 @@ class SharePointOnlineListSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, } @@ -30908,18 +31986,18 @@ class ShopifyLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param host: Required. The endpoint of the Shopify server. (i.e. mystore.myshopify.com). :type host: object :param access_token: The API access token that can be used to access Shopify’s data. The token won't expire if it is offline mode. - :type access_token: ~data_factory_management_client.models.SecretBase + :type access_token: ~azure.mgmt.datafactory.models.SecretBase :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. :type use_encrypted_endpoints: object @@ -30989,14 +32067,14 @@ class ShopifyObjectDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The table name. Type: string (or Expression with resultType string). :type table_name: object """ @@ -31047,12 +32125,15 @@ class ShopifySource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. 
Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -31068,8 +32149,9 @@ class ShopifySource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -31126,14 +32208,14 @@ class SnowflakeDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param schema_type_properties_schema: The schema name of the Snowflake database. Type: string (or Expression with resultType string). :type schema_type_properties_schema: object @@ -31268,18 +32350,18 @@ class SnowflakeLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param connection_string: Required. The connection string of snowflake. Type: string, SecureString. 
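A note on the queryTimeout pattern repeated in these docstrings: it describes a .NET-style timespan, optionally prefixed with days, i.e. [d.]hh:mm:ss. A self-contained check (this sketch escapes the day separator as a literal dot, which the documented pattern leaves as a bare '.'):

import re

# [d.]hh:mm:ss — e.g. "02:00:00" is 2 hours, "1.12:30:00" is 1 day 12.5 hours.
TIMESPAN = re.compile(r"((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))")

for value in ("02:00:00", "1.12:30:00"):
    assert TIMESPAN.fullmatch(value), value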
:type connection_string: object :param password: The Azure key vault secret reference of password in connection string. - :type password: ~data_factory_management_client.models.AzureKeyVaultSecretReference + :type password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -31339,11 +32421,14 @@ class SnowflakeSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType string). :type pre_copy_script: object :param import_settings: Snowflake import settings. - :type import_settings: ~data_factory_management_client.models.SnowflakeImportCopyCommand + :type import_settings: ~azure.mgmt.datafactory.models.SnowflakeImportCopyCommand """ _validation = { @@ -31358,6 +32443,7 @@ class SnowflakeSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, 'import_settings': {'key': 'importSettings', 'type': 'SnowflakeImportCopyCommand'}, } @@ -31391,10 +32477,13 @@ class SnowflakeSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query: Snowflake Sql query. Type: string (or Expression with resultType string). :type query: object :param export_settings: Snowflake export settings. - :type export_settings: ~data_factory_management_client.models.SnowflakeExportCopyCommand + :type export_settings: ~azure.mgmt.datafactory.models.SnowflakeExportCopyCommand """ _validation = { @@ -31407,6 +32496,7 @@ class SnowflakeSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'export_settings': {'key': 'exportSettings', 'type': 'SnowflakeExportCopyCommand'}, } @@ -31432,11 +32522,11 @@ class SparkLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. 
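For illustration: a SnowflakeSource combining the documented query/export_settings fields with the new metrics flag. Constructing SnowflakeExportCopyCommand with no arguments is assumed to fall back to default COPY INTO options:

from azure.mgmt.datafactory.models import SnowflakeExportCopyCommand, SnowflakeSource

source = SnowflakeSource(
    query="SELECT * FROM ORDERS",  # hypothetical Snowflake SQL query
    export_settings=SnowflakeExportCopyCommand(),
    disable_metrics_collection=False,
)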
- :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param host: Required. IP address or host name of the Spark server. @@ -31446,21 +32536,20 @@ class SparkLinkedService(LinkedService): :type port: object :param server_type: The type of Spark server. Possible values include: "SharkServer", "SharkServer2", "SparkThriftServer". - :type server_type: str or ~data_factory_management_client.models.SparkServerType + :type server_type: str or ~azure.mgmt.datafactory.models.SparkServerType :param thrift_transport_protocol: The transport protocol to use in the Thrift layer. Possible values include: "Binary", "SASL", "HTTP ". :type thrift_transport_protocol: str or - ~data_factory_management_client.models.SparkThriftTransportProtocol + ~azure.mgmt.datafactory.models.SparkThriftTransportProtocol :param authentication_type: Required. The authentication method used to access the Spark server. Possible values include: "Anonymous", "Username", "UsernameAndPassword", "WindowsAzureHDInsightService". - :type authentication_type: str or - ~data_factory_management_client.models.SparkAuthenticationType + :type authentication_type: str or ~azure.mgmt.datafactory.models.SparkAuthenticationType :param username: The user name that you use to access Spark Server. :type username: object :param password: The password corresponding to the user name that you provided in the Username field. - :type password: ~data_factory_management_client.models.SecretBase + :type password: ~azure.mgmt.datafactory.models.SecretBase :param http_path: The partial URL corresponding to the Spark server. :type http_path: object :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The @@ -31556,14 +32645,14 @@ class SparkObjectDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: This property will be retired. Please consider using schema + table properties instead. :type table_name: object @@ -31624,12 +32713,15 @@ class SparkSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). 
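For illustration: a SparkLinkedService assembled from the fields documented above; the host, port, and credentials are placeholders, and the enum values are taken from the possible-values lists in the docstring:

from azure.mgmt.datafactory.models import SecureString, SparkLinkedService

linked_service = SparkLinkedService(
    host="spark.example.com",  # placeholder host
    port=10001,                # placeholder Thrift port
    server_type="SparkThriftServer",
    authentication_type="UsernameAndPassword",
    username="spark_user",
    password=SecureString(value="<placeholder-secret>"),
    enable_ssl=True,
)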
:type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -31645,8 +32737,9 @@ class SparkSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -31668,13 +32761,13 @@ class SqlAlwaysEncryptedProperties(msrest.serialization.Model): Type: string (or Expression with resultType string). Possible values include: "ServicePrincipal", "ManagedIdentity". :type always_encrypted_akv_auth_type: str or - ~data_factory_management_client.models.SqlAlwaysEncryptedAkvAuthType + ~azure.mgmt.datafactory.models.SqlAlwaysEncryptedAkvAuthType :param service_principal_id: The client ID of the application in Azure Active Directory used for Azure Key Vault authentication. Type: string (or Expression with resultType string). :type service_principal_id: object :param service_principal_key: The key of the service principal used to authenticate against Azure Key Vault. - :type service_principal_key: ~data_factory_management_client.models.SecretBase + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase """ _validation = { @@ -31722,6 +32815,9 @@ class SqlDwSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType string). :type pre_copy_script: object @@ -31729,16 +32825,24 @@ class SqlDwSink(CopySink): applicable. Type: boolean (or Expression with resultType boolean). :type allow_poly_base: object :param poly_base_settings: Specifies PolyBase-related settings when allowPolyBase is true. - :type poly_base_settings: ~data_factory_management_client.models.PolybaseSettings + :type poly_base_settings: ~azure.mgmt.datafactory.models.PolybaseSettings :param allow_copy_command: Indicates to use Copy Command to copy data into SQL Data Warehouse. 
Type: boolean (or Expression with resultType boolean). :type allow_copy_command: object :param copy_command_settings: Specifies Copy Command related settings when allowCopyCommand is true. - :type copy_command_settings: ~data_factory_management_client.models.DwCopyCommandSettings + :type copy_command_settings: ~azure.mgmt.datafactory.models.DwCopyCommandSettings :param table_option: The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string (or Expression with resultType string). :type table_option: object + :param sql_writer_use_table_lock: Whether to use table lock during bulk copy. Type: boolean (or + Expression with resultType boolean). + :type sql_writer_use_table_lock: object + :param write_behavior: Write behavior when copying data into Azure SQL DW. Type: + SqlDWWriteBehaviorEnum (or Expression with resultType SqlDWWriteBehaviorEnum). + :type write_behavior: object + :param upsert_settings: SQL DW upsert settings. + :type upsert_settings: ~azure.mgmt.datafactory.models.SqlDwUpsertSettings """ _validation = { @@ -31753,12 +32857,16 @@ 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, 'allow_poly_base': {'key': 'allowPolyBase', 'type': 'object'}, 'poly_base_settings': {'key': 'polyBaseSettings', 'type': 'PolybaseSettings'}, 'allow_copy_command': {'key': 'allowCopyCommand', 'type': 'object'}, 'copy_command_settings': {'key': 'copyCommandSettings', 'type': 'DwCopyCommandSettings'}, 'table_option': {'key': 'tableOption', 'type': 'object'}, + 'sql_writer_use_table_lock': {'key': 'sqlWriterUseTableLock', 'type': 'object'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, + 'upsert_settings': {'key': 'upsertSettings', 'type': 'SqlDwUpsertSettings'}, } def __init__( @@ -31773,6 +32881,9 @@ def __init__( self.allow_copy_command = kwargs.get('allow_copy_command', None) self.copy_command_settings = kwargs.get('copy_command_settings', None) self.table_option = kwargs.get('table_option', None) + self.sql_writer_use_table_lock = kwargs.get('sql_writer_use_table_lock', None) + self.write_behavior = kwargs.get('write_behavior', None) + self.upsert_settings = kwargs.get('upsert_settings', None) class SqlDwSource(TabularSource): @@ -31794,12 +32905,15 @@ :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object :param sql_reader_query: SQL Data Warehouse reader query. Type: string (or Expression with resultType string). :type sql_reader_query: object @@ -31815,7 +32929,7 @@ class SqlDwSource(TabularSource): Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". :type partition_option: object :param partition_settings: The settings that will be leveraged for Sql source partitioning. - :type partition_settings: ~data_factory_management_client.models.SqlPartitionSettings + :type partition_settings: ~azure.mgmt.datafactory.models.SqlPartitionSettings """ _validation = { @@ -31828,8 +32942,9 @@ class SqlDwSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': 'object'}, @@ -31850,6 +32965,31 @@ def __init__( self.partition_settings = kwargs.get('partition_settings', None) +class SqlDwUpsertSettings(msrest.serialization.Model): + """Sql DW upsert option settings. + + :param interim_schema_name: Schema name for interim table. Type: string (or Expression with + resultType string). + :type interim_schema_name: object + :param keys: Key column names for unique row identification. Type: array of strings (or + Expression with resultType array of strings). + :type keys: object + """ + + _attribute_map = { + 'interim_schema_name': {'key': 'interimSchemaName', 'type': 'object'}, + 'keys': {'key': 'keys', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(SqlDwUpsertSettings, self).__init__(**kwargs) + self.interim_schema_name = kwargs.get('interim_schema_name', None) + self.keys = kwargs.get('keys', None) + + class SqlMiSink(CopySink): """A copy activity Azure SQL Managed Instance sink. @@ -31875,6 +33015,9 @@ class SqlMiSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or Expression with resultType string). :type sql_writer_stored_procedure_name: object @@ -31886,13 +33029,21 @@ class SqlMiSink(CopySink): :type pre_copy_script: object :param stored_procedure_parameters: SQL stored procedure parameters. :type stored_procedure_parameters: dict[str, - ~data_factory_management_client.models.StoredProcedureParameter] + ~azure.mgmt.datafactory.models.StoredProcedureParameter] :param stored_procedure_table_type_parameter_name: The stored procedure parameter name of the table type. Type: string (or Expression with resultType string). 
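Putting the new SqlDwSink fields and the SqlDwUpsertSettings model above together, a hedged sketch of an upsert-mode sink; the schema and key names are hypothetical, and "Upsert" is assumed to be a valid SqlDWWriteBehaviorEnum value:

from azure.mgmt.datafactory.models import SqlDwSink, SqlDwUpsertSettings

sink = SqlDwSink(
    write_behavior="Upsert",            # without this, the sink does a plain insert
    upsert_settings=SqlDwUpsertSettings(
        interim_schema_name="staging",  # hypothetical interim schema
        keys=["CustomerId"],            # unique-row key columns
    ),
    sql_writer_use_table_lock=True,     # take a table lock during the bulk copy
)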
:type stored_procedure_table_type_parameter_name: object :param table_option: The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string (or Expression with resultType string). :type table_option: object + :param sql_writer_use_table_lock: Whether to use table lock during bulk copy. Type: boolean (or + Expression with resultType boolean). + :type sql_writer_use_table_lock: object + :param write_behavior: Write behavior when copying data into Azure SQL MI. Type: + SqlWriteBehaviorEnum (or Expression with resultType SqlWriteBehaviorEnum). + :type write_behavior: object + :param upsert_settings: SQL upsert settings. + :type upsert_settings: ~azure.mgmt.datafactory.models.SqlUpsertSettings """ _validation = { @@ -31907,12 +33058,16 @@ 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, 'table_option': {'key': 'tableOption', 'type': 'object'}, + 'sql_writer_use_table_lock': {'key': 'sqlWriterUseTableLock', 'type': 'object'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, + 'upsert_settings': {'key': 'upsertSettings', 'type': 'SqlUpsertSettings'}, } def __init__( @@ -31927,6 +33082,9 @@ def __init__( self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) self.stored_procedure_table_type_parameter_name = kwargs.get('stored_procedure_table_type_parameter_name', None) self.table_option = kwargs.get('table_option', None) + self.sql_writer_use_table_lock = kwargs.get('sql_writer_use_table_lock', None) + self.write_behavior = kwargs.get('write_behavior', None) + self.upsert_settings = kwargs.get('upsert_settings', None) class SqlMiSource(TabularSource): @@ -31948,12 +33106,15 @@ :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string).
class SqlMiSource(TabularSource):
@@ -31948,12 +33106,15 @@ class SqlMiSource(TabularSource):
 :param max_concurrent_connections: The maximum concurrent connection count for the source data
 store. Type: integer (or Expression with resultType integer).
 :type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
 :param query_timeout: Query timeout. Type: string (or Expression with resultType string),
 pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
 :type query_timeout: object
 :param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
 :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string).
 :type sql_reader_query: object
 :param sql_reader_stored_procedure_name: Name of the stored procedure for an Azure SQL Managed
@@ -31963,14 +33124,14 @@ class SqlMiSource(TabularSource):
 :param stored_procedure_parameters: Value and type setting for stored procedure parameters.
 Example: "{Parameter1: {value: "1", type: "int"}}".
 :type stored_procedure_parameters: dict[str,
- ~data_factory_management_client.models.StoredProcedureParameter]
+ ~azure.mgmt.datafactory.models.StoredProcedureParameter]
 :param produce_additional_types: Which additional types to produce.
 :type produce_additional_types: object
 :param partition_option: The partition mechanism that will be used for Sql read in parallel.
 Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange".
 :type partition_option: object
 :param partition_settings: The settings that will be leveraged for Sql source partitioning.
- :type partition_settings: ~data_factory_management_client.models.SqlPartitionSettings
+ :type partition_settings: ~azure.mgmt.datafactory.models.SqlPartitionSettings
 """

 _validation = {
@@ -31983,8 +33144,9 @@ class SqlMiSource(TabularSource):
 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
 'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'},
 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'},
 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'},
@@ -32053,11 +33215,11 @@ class SqlServerLinkedService(LinkedService):
 :param type: Required. Type of linked service.Constant filled by server.
 :type type: str
 :param connect_via: The integration runtime reference.
- :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference
+ :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
 :param description: Linked service description.
 :type description: str
 :param parameters: Parameters for linked service.
- :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification]
+ :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification]
 :param annotations: List of tags that can be used for describing the linked service.
 :type annotations: list[object]
 :param connection_string: Required. The connection string. Type: string, SecureString or
 AzureKeyVaultSecretReference.
 :type connection_string: object
 :param user_name: The on-premises Windows authentication user name. Type: string (or Expression
 with resultType string).
 :type user_name: object
 :param password: The on-premises Windows authentication password.
- :type password: ~data_factory_management_client.models.SecretBase
+ :type password: ~azure.mgmt.datafactory.models.SecretBase
 :param encrypted_credential: The encrypted credential used for authentication. Credentials are
 encrypted using the integration runtime credential manager. Type: string (or Expression with
 resultType string).
 :type encrypted_credential: object
 :param always_encrypted_settings: Sql always encrypted properties.
- :type always_encrypted_settings:
- ~data_factory_management_client.models.SqlAlwaysEncryptedProperties
+ :type always_encrypted_settings: ~azure.mgmt.datafactory.models.SqlAlwaysEncryptedProperties
 """

 _validation = {
@@ -32134,6 +33295,9 @@ class SqlServerSink(CopySink):
 :param max_concurrent_connections: The maximum concurrent connection count for the sink data
 store. Type: integer (or Expression with resultType integer).
 :type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
 :param sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or
 Expression with resultType string).
 :type sql_writer_stored_procedure_name: object
@@ -32145,13 +33309,21 @@ class SqlServerSink(CopySink):
 :type pre_copy_script: object
 :param stored_procedure_parameters: SQL stored procedure parameters.
 :type stored_procedure_parameters: dict[str,
- ~data_factory_management_client.models.StoredProcedureParameter]
+ ~azure.mgmt.datafactory.models.StoredProcedureParameter]
 :param stored_procedure_table_type_parameter_name: The stored procedure parameter name of the
 table type. Type: string (or Expression with resultType string).
 :type stored_procedure_table_type_parameter_name: object
 :param table_option: The option to handle sink table, such as autoCreate. For now only
 'autoCreate' value is supported. Type: string (or Expression with resultType string).
 :type table_option: object
+ :param sql_writer_use_table_lock: Whether to use table lock during bulk copy. Type: boolean (or
+ Expression with resultType boolean).
+ :type sql_writer_use_table_lock: object
+ :param write_behavior: Write behavior when copying data into SQL Server. Type:
+ SqlWriteBehaviorEnum (or Expression with resultType SqlWriteBehaviorEnum).
+ :type write_behavior: object
+ :param upsert_settings: SQL upsert settings.
+ :type upsert_settings: ~azure.mgmt.datafactory.models.SqlUpsertSettings
 """

 _validation = {
@@ -32166,12 +33338,16 @@ class SqlServerSink(CopySink):
 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'},
 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'},
 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'},
 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'},
 'table_option': {'key': 'tableOption', 'type': 'object'},
+ 'sql_writer_use_table_lock': {'key': 'sqlWriterUseTableLock', 'type': 'object'},
+ 'write_behavior': {'key': 'writeBehavior', 'type': 'object'},
+ 'upsert_settings': {'key': 'upsertSettings', 'type': 'SqlUpsertSettings'},
 }

 def __init__(
@@ -32186,6 +33362,9 @@ def __init__(
 self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None)
 self.stored_procedure_table_type_parameter_name = kwargs.get('stored_procedure_table_type_parameter_name', None)
 self.table_option = kwargs.get('table_option', None)
+ self.sql_writer_use_table_lock = kwargs.get('sql_writer_use_table_lock', None)
+ self.write_behavior = kwargs.get('write_behavior', None)
+ self.upsert_settings = kwargs.get('upsert_settings', None)
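`disable_metrics_collection` is added uniformly to sources and sinks in this change and is serialized as a plain object, so it can carry either a literal boolean or an ADF expression. A hedged sketch, under the same assumed vendored import path as above; the expression payload shape follows the usual ADF convention but is not shown in this diff:

from azext_datafactory.vendored_sdks.datafactory.models import SqlServerSource

# Literal boolean: opt this source out of data store metrics collection.
source = SqlServerSource(disable_metrics_collection=True)

# Or defer the decision to a pipeline parameter at runtime (assumed expression shape).
source = SqlServerSource(
    disable_metrics_collection={'type': 'Expression', 'value': "@pipeline().parameters.disableMetrics"},
)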
class SqlServerSource(TabularSource):
@@ -32207,12 +33386,15 @@ class SqlServerSource(TabularSource):
 :param max_concurrent_connections: The maximum concurrent connection count for the source data
 store. Type: integer (or Expression with resultType integer).
 :type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
 :param query_timeout: Query timeout. Type: string (or Expression with resultType string),
 pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
 :type query_timeout: object
 :param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
 :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string).
 :type sql_reader_query: object
 :param sql_reader_stored_procedure_name: Name of the stored procedure for a SQL Database
@@ -32222,14 +33404,14 @@ class SqlServerSource(TabularSource):
 :param stored_procedure_parameters: Value and type setting for stored procedure parameters.
 Example: "{Parameter1: {value: "1", type: "int"}}".
 :type stored_procedure_parameters: dict[str,
- ~data_factory_management_client.models.StoredProcedureParameter]
+ ~azure.mgmt.datafactory.models.StoredProcedureParameter]
 :param produce_additional_types: Which additional types to produce.
 :type produce_additional_types: object
 :param partition_option: The partition mechanism that will be used for Sql read in parallel.
Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". :type partition_option: object :param partition_settings: The settings that will be leveraged for Sql source partitioning. - :type partition_settings: ~data_factory_management_client.models.SqlPartitionSettings + :type partition_settings: ~azure.mgmt.datafactory.models.SqlPartitionSettings """ _validation = { @@ -32242,8 +33424,9 @@ class SqlServerSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, @@ -32281,20 +33464,20 @@ class SqlServerStoredProcedureActivity(ExecutionActivity): :param description: Activity description. :type description: str :param depends_on: Activity depends on condition. - :type depends_on: list[~data_factory_management_client.models.ActivityDependency] + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. - :type user_properties: list[~data_factory_management_client.models.UserProperty] + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] :param linked_service_name: Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param policy: Activity policy. - :type policy: ~data_factory_management_client.models.ActivityPolicy + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy :param stored_procedure_name: Required. Stored procedure name. Type: string (or Expression with resultType string). :type stored_procedure_name: object :param stored_procedure_parameters: Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". :type stored_procedure_parameters: dict[str, - ~data_factory_management_client.models.StoredProcedureParameter] + ~azure.mgmt.datafactory.models.StoredProcedureParameter] """ _validation = { @@ -32345,14 +33528,14 @@ class SqlServerTableDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. 
- :type folder: ~data_factory_management_client.models.DatasetFolder
+ :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
 :param table_name: This property will be retired. Please consider using schema + table
 properties instead.
 :type table_name: object
@@ -32420,6 +33603,9 @@ class SqlSink(CopySink):
 :param max_concurrent_connections: The maximum concurrent connection count for the sink data
 store. Type: integer (or Expression with resultType integer).
 :type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
 :param sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or
 Expression with resultType string).
 :type sql_writer_stored_procedure_name: object
@@ -32431,13 +33617,21 @@ class SqlSink(CopySink):
 :type pre_copy_script: object
 :param stored_procedure_parameters: SQL stored procedure parameters.
 :type stored_procedure_parameters: dict[str,
- ~data_factory_management_client.models.StoredProcedureParameter]
+ ~azure.mgmt.datafactory.models.StoredProcedureParameter]
 :param stored_procedure_table_type_parameter_name: The stored procedure parameter name of the
 table type. Type: string (or Expression with resultType string).
 :type stored_procedure_table_type_parameter_name: object
 :param table_option: The option to handle sink table, such as autoCreate. For now only
 'autoCreate' value is supported. Type: string (or Expression with resultType string).
 :type table_option: object
+ :param sql_writer_use_table_lock: Whether to use table lock during bulk copy. Type: boolean (or
+ Expression with resultType boolean).
+ :type sql_writer_use_table_lock: object
+ :param write_behavior: Write behavior when copying data into SQL. Type: SqlWriteBehaviorEnum
+ (or Expression with resultType SqlWriteBehaviorEnum).
+ :type write_behavior: object
+ :param upsert_settings: SQL upsert settings.
+ :type upsert_settings: ~azure.mgmt.datafactory.models.SqlUpsertSettings
 """

 _validation = {
@@ -32452,12 +33646,16 @@ class SqlSink(CopySink):
 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'},
 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'},
 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'},
 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'},
 'table_option': {'key': 'tableOption', 'type': 'object'},
+ 'sql_writer_use_table_lock': {'key': 'sqlWriterUseTableLock', 'type': 'object'},
+ 'write_behavior': {'key': 'writeBehavior', 'type': 'object'},
+ 'upsert_settings': {'key': 'upsertSettings', 'type': 'SqlUpsertSettings'},
 }

 def __init__(
@@ -32472,6 +33670,9 @@ def __init__(
 self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None)
 self.stored_procedure_table_type_parameter_name = kwargs.get('stored_procedure_table_type_parameter_name', None)
 self.table_option = kwargs.get('table_option', None)
+ self.sql_writer_use_table_lock = kwargs.get('sql_writer_use_table_lock', None)
+ self.write_behavior = kwargs.get('write_behavior', None)
+ self.upsert_settings = kwargs.get('upsert_settings', None)
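The generic `SqlSink` gains the same upsert trio. Where tempdb staging is undesirable, `SqlUpsertSettings` can point the interim table at a named schema instead; another sketch under the same assumptions as above (schema and key names are hypothetical):

from azext_datafactory.vendored_sdks.datafactory.models import SqlSink, SqlUpsertSettings

sink = SqlSink(
    write_behavior='Upsert',            # assumed enum literal, as above
    upsert_settings=SqlUpsertSettings(
        use_temp_db=False,              # keep the interim table out of tempdb
        interim_schema_name='staging',  # hypothetical schema for the interim table
        keys=['OrderId'],               # hypothetical key column
    ),
)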
class SqlSource(TabularSource):
@@ -32493,12 +33694,15 @@ class SqlSource(TabularSource):
 :param max_concurrent_connections: The maximum concurrent connection count for the source data
 store. Type: integer (or Expression with resultType integer).
 :type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
 :param query_timeout: Query timeout. Type: string (or Expression with resultType string),
 pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
 :type query_timeout: object
 :param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
 :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string).
 :type sql_reader_query: object
 :param sql_reader_stored_procedure_name: Name of the stored procedure for a SQL Database
@@ -32508,7 +33712,7 @@ class SqlSource(TabularSource):
 :param stored_procedure_parameters: Value and type setting for stored procedure parameters.
 Example: "{Parameter1: {value: "1", type: "int"}}".
 :type stored_procedure_parameters: dict[str,
- ~data_factory_management_client.models.StoredProcedureParameter]
+ ~azure.mgmt.datafactory.models.StoredProcedureParameter]
 :param isolation_level: Specifies the transaction locking behavior for the SQL source. Allowed
 values: ReadCommitted/ReadUncommitted/RepeatableRead/Serializable/Snapshot. The default value is
 ReadCommitted.
 Type: string (or Expression with resultType string).
@@ -32517,7 +33721,7 @@ class SqlSource(TabularSource):
 Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange".
 :type partition_option: object
 :param partition_settings: The settings that will be leveraged for Sql source partitioning.
- :type partition_settings: ~data_factory_management_client.models.SqlPartitionSettings
+ :type partition_settings: ~azure.mgmt.datafactory.models.SqlPartitionSettings
 """

 _validation = {
@@ -32530,8 +33734,9 @@ class SqlSource(TabularSource):
 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
 'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'},
 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'},
 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'},
@@ -32554,6 +33759,36 @@ def __init__(
 self.partition_settings = kwargs.get('partition_settings', None)


+class SqlUpsertSettings(msrest.serialization.Model):
+ """Sql upsert option settings.
+
+ :param use_temp_db: Specifies whether to use temp db for upsert interim table. Type: boolean
+ (or Expression with resultType boolean).
+ :type use_temp_db: object
+ :param interim_schema_name: Schema name for interim table. Type: string (or Expression with
+ resultType string).
+ :type interim_schema_name: object
+ :param keys: Key column names for unique row identification. Type: array of strings (or
+ Expression with resultType array of strings).
+ :type keys: object
+ """
+
+ _attribute_map = {
+ 'use_temp_db': {'key': 'useTempDB', 'type': 'object'},
+ 'interim_schema_name': {'key': 'interimSchemaName', 'type': 'object'},
+ 'keys': {'key': 'keys', 'type': 'object'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(SqlUpsertSettings, self).__init__(**kwargs)
+ self.use_temp_db = kwargs.get('use_temp_db', None)
+ self.interim_schema_name = kwargs.get('interim_schema_name', None)
+ self.keys = kwargs.get('keys', None)
+
+
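Since `additional_columns` is now serialized as a plain `object` instead of `[AdditionalColumns]`, callers can pass a raw list of name/value mappings (or an expression) without building typed models. A minimal sketch; the query, column name, and import path are assumptions:

from azext_datafactory.vendored_sdks.datafactory.models import SqlSource

source = SqlSource(
    sql_reader_query='SELECT * FROM dbo.Orders',  # hypothetical reader query
    additional_columns=[
        {'name': 'ingest_run_id', 'value': '@pipeline().RunId'},  # hypothetical derived column
    ],
)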
class SquareLinkedService(LinkedService):
 """Square Service linked service.
@@ -32565,11 +33800,11 @@ class SquareLinkedService(LinkedService):
 :param type: Required. Type of linked service.Constant filled by server.
 :type type: str
 :param connect_via: The integration runtime reference.
- :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference
+ :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
 :param description: Linked service description.
 :type description: str
 :param parameters: Parameters for linked service.
- :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification]
+ :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification]
 :param annotations: List of tags that can be used for describing the linked service.
 :type annotations: list[object]
 :param connection_properties: Properties used to connect to Square. It is mutually exclusive
@@ -32580,7 +33815,7 @@ class SquareLinkedService(LinkedService):
 :param client_id: The client ID associated with your Square application.
 :type client_id: object
 :param client_secret: The client secret associated with your Square application.
- :type client_secret: ~data_factory_management_client.models.SecretBase
+ :type client_secret: ~azure.mgmt.datafactory.models.SecretBase
 :param redirect_uri: The redirect URL assigned in the Square application dashboard. (i.e.
 http://localhost:2500).
 :type redirect_uri: object
@@ -32658,14 +33893,14 @@ class SquareObjectDataset(Dataset):
 Expression with resultType array), itemType: DatasetSchemaDataElement.
 :type schema: object
 :param linked_service_name: Required. Linked service reference.
- :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference
+ :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference
 :param parameters: Parameters for dataset.
- :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification]
+ :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification]
 :param annotations: List of tags that can be used for describing the Dataset.
 :type annotations: list[object]
 :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the
 root level.
- :type folder: ~data_factory_management_client.models.DatasetFolder
+ :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
 :param table_name: The table name. Type: string (or Expression with resultType string).
 :type table_name: object
 """
@@ -32716,12 +33951,15 @@ class SquareSource(TabularSource):
 :param max_concurrent_connections: The maximum concurrent connection count for the source data
 store. Type: integer (or Expression with resultType integer).
 :type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
 :param query_timeout: Query timeout. Type: string (or Expression with resultType string),
 pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
 :type query_timeout: object
 :param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
 :param query: A query to retrieve data from source. Type: string (or Expression with
 resultType string).
 :type query: object
@@ -32737,8 +33975,9 @@ class SquareSource(TabularSource):
 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
 'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
 'query': {'key': 'query', 'type': 'object'},
 }
@@ -32761,7 +34000,7 @@ class SsisAccessCredential(msrest.serialization.Model):
 :param user_name: Required. UserName for Windows authentication.
 :type user_name: object
 :param password: Required. Password for Windows authentication.
- :type password: ~data_factory_management_client.models.SecretBase
+ :type password: ~azure.mgmt.datafactory.models.SecretBase
 """

 _validation = {
@@ -32836,7 +34075,7 @@ class SsisObjectMetadata(msrest.serialization.Model):
 :param type: Required. Type of metadata.Constant filled by server. Possible values include:
 "Folder", "Project", "Package", "Environment".
- :type type: str or ~data_factory_management_client.models.SsisObjectMetadataType
+ :type type: str or ~azure.mgmt.datafactory.models.SsisObjectMetadataType
 :param id: Metadata id.
 :type id: long
 :param name: Metadata name.
@@ -32878,7 +34117,7 @@ class SsisEnvironment(SsisObjectMetadata):
 :param type: Required. Type of metadata.Constant filled by server. Possible values include:
 "Folder", "Project", "Package", "Environment".
- :type type: str or ~data_factory_management_client.models.SsisObjectMetadataType
+ :type type: str or ~azure.mgmt.datafactory.models.SsisObjectMetadataType
 :param id: Metadata id.
 :type id: long
 :param name: Metadata name.
@@ -32888,7 +34127,7 @@ class SsisEnvironment(SsisObjectMetadata):
 :param folder_id: Folder id which contains environment.
 :type folder_id: long
 :param variables: Variable in environment.
- :type variables: list[~data_factory_management_client.models.SsisVariable]
+ :type variables: list[~azure.mgmt.datafactory.models.SsisVariable]
 """

 _validation = {
@@ -32955,7 +34194,7 @@ class SsisExecutionCredential(msrest.serialization.Model):
 :param user_name: Required. UserName for Windows authentication.
 :type user_name: object
 :param password: Required. Password for Windows authentication.
- :type password: ~data_factory_management_client.models.SecureString
+ :type password: ~azure.mgmt.datafactory.models.SecureString
 """

 _validation = {
@@ -33013,7 +34252,7 @@ class SsisFolder(SsisObjectMetadata):
 :param type: Required. Type of metadata.Constant filled by server. Possible values include:
 "Folder", "Project", "Package", "Environment".
- :type type: str or ~data_factory_management_client.models.SsisObjectMetadataType
+ :type type: str or ~azure.mgmt.datafactory.models.SsisObjectMetadataType
 :param id: Metadata id.
 :type id: long
 :param name: Metadata name.
@@ -33050,9 +34289,9 @@ class SsisLogLocation(msrest.serialization.Model):
 with resultType string).
 :type log_path: object
 :param type: Required. The type of SSIS log location. Possible values include: "File".
- :type type: str or ~data_factory_management_client.models.SsisLogLocationType
+ :type type: str or ~azure.mgmt.datafactory.models.SsisLogLocationType
 :param access_credential: The package execution log access credential.
- :type access_credential: ~data_factory_management_client.models.SsisAccessCredential
+ :type access_credential: ~azure.mgmt.datafactory.models.SsisAccessCredential
 :param log_refresh_interval: Specifies the interval to refresh log. The default interval is 5
 minutes. Type: string (or Expression with resultType string), pattern:
 ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
@@ -33086,7 +34325,7 @@ class SsisObjectMetadataListResponse(msrest.serialization.Model):
 """A list of SSIS object metadata.

 :param value: List of SSIS object metadata.
- :type value: list[~data_factory_management_client.models.SsisObjectMetadata]
+ :type value: list[~azure.mgmt.datafactory.models.SsisObjectMetadata]
 :param next_link: The link to the next page of results, if any remaining results exist.
:type next_link: str """ @@ -33143,7 +34382,7 @@ class SsisPackage(SsisObjectMetadata): :param type: Required. Type of metadata.Constant filled by server. Possible values include: "Folder", "Project", "Package", "Environment". - :type type: str or ~data_factory_management_client.models.SsisObjectMetadataType + :type type: str or ~azure.mgmt.datafactory.models.SsisObjectMetadataType :param id: Metadata id. :type id: long :param name: Metadata name. @@ -33157,7 +34396,7 @@ class SsisPackage(SsisObjectMetadata): :param project_id: Project id which contains package. :type project_id: long :param parameters: Parameters in package. - :type parameters: list[~data_factory_management_client.models.SsisParameter] + :type parameters: list[~azure.mgmt.datafactory.models.SsisParameter] """ _validation = { @@ -33195,17 +34434,16 @@ class SsisPackageLocation(msrest.serialization.Model): :type package_path: object :param type: The type of SSIS package location. Possible values include: "SSISDB", "File", "InlinePackage", "PackageStore". - :type type: str or ~data_factory_management_client.models.SsisPackageLocationType + :type type: str or ~azure.mgmt.datafactory.models.SsisPackageLocationType :param package_password: Password of the package. - :type package_password: ~data_factory_management_client.models.SecretBase + :type package_password: ~azure.mgmt.datafactory.models.SecretBase :param access_credential: The package access credential. - :type access_credential: ~data_factory_management_client.models.SsisAccessCredential + :type access_credential: ~azure.mgmt.datafactory.models.SsisAccessCredential :param configuration_path: The configuration file of the package execution. Type: string (or Expression with resultType string). :type configuration_path: object :param configuration_access_credential: The configuration file access credential. - :type configuration_access_credential: - ~data_factory_management_client.models.SsisAccessCredential + :type configuration_access_credential: ~azure.mgmt.datafactory.models.SsisAccessCredential :param package_name: The package name. :type package_name: str :param package_content: The embedded package content. Type: string (or Expression with @@ -33214,7 +34452,7 @@ class SsisPackageLocation(msrest.serialization.Model): :param package_last_modified_date: The embedded package last modified date. :type package_last_modified_date: str :param child_packages: The embedded child package list. - :type child_packages: list[~data_factory_management_client.models.SsisChildPackage] + :type child_packages: list[~azure.mgmt.datafactory.models.SsisChildPackage] """ _attribute_map = { @@ -33317,7 +34555,7 @@ class SsisProject(SsisObjectMetadata): :param type: Required. Type of metadata.Constant filled by server. Possible values include: "Folder", "Project", "Package", "Environment". - :type type: str or ~data_factory_management_client.models.SsisObjectMetadataType + :type type: str or ~azure.mgmt.datafactory.models.SsisObjectMetadataType :param id: Metadata id. :type id: long :param name: Metadata name. @@ -33329,9 +34567,9 @@ class SsisProject(SsisObjectMetadata): :param version: Project version. :type version: long :param environment_refs: Environment reference in project. - :type environment_refs: list[~data_factory_management_client.models.SsisEnvironmentReference] + :type environment_refs: list[~azure.mgmt.datafactory.models.SsisEnvironmentReference] :param parameters: Parameters in project. 
- :type parameters: list[~data_factory_management_client.models.SsisParameter] + :type parameters: list[~azure.mgmt.datafactory.models.SsisParameter] """ _validation = { @@ -33444,7 +34682,7 @@ class StagingSettings(msrest.serialization.Model): collection. :type additional_properties: dict[str, object] :param linked_service_name: Required. Staging linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param path: The path to storage for storing the interim data. Type: string (or Expression with resultType string). :type path: object @@ -33483,7 +34721,7 @@ class StoredProcedureParameter(msrest.serialization.Model): :type value: object :param type: Stored procedure parameter type. Possible values include: "String", "Int", "Int64", "Decimal", "Guid", "Boolean", "Date". - :type type: str or ~data_factory_management_client.models.StoredProcedureParameterType + :type type: str or ~azure.mgmt.datafactory.models.StoredProcedureParameterType """ _attribute_map = { @@ -33515,19 +34753,19 @@ class SwitchActivity(Activity): :param description: Activity description. :type description: str :param depends_on: Activity depends on condition. - :type depends_on: list[~data_factory_management_client.models.ActivityDependency] + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. - :type user_properties: list[~data_factory_management_client.models.UserProperty] + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] :param on: Required. An expression that would evaluate to a string or integer. This is used to determine the block of activities in cases that will be executed. - :type on: ~data_factory_management_client.models.Expression + :type on: ~azure.mgmt.datafactory.models.Expression :param cases: List of cases that correspond to expected values of the 'on' property. This is an optional property and if not provided, the activity will execute activities provided in defaultActivities. - :type cases: list[~data_factory_management_client.models.SwitchCase] + :type cases: list[~azure.mgmt.datafactory.models.SwitchCase] :param default_activities: List of activities to execute if no case condition is satisfied. This is an optional property and if not provided, the activity will exit without any action. - :type default_activities: list[~data_factory_management_client.models.Activity] + :type default_activities: list[~azure.mgmt.datafactory.models.Activity] """ _validation = { @@ -33565,7 +34803,7 @@ class SwitchCase(msrest.serialization.Model): :param value: Expected value that satisfies the expression result of the 'on' property. :type value: str :param activities: List of activities to execute for satisfied case condition. - :type activities: list[~data_factory_management_client.models.Activity] + :type activities: list[~azure.mgmt.datafactory.models.Activity] """ _attribute_map = { @@ -33593,11 +34831,11 @@ class SybaseLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. 
- :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification]
+ :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification]
 :param annotations: List of tags that can be used for describing the linked service.
 :type annotations: list[object]
 :param server: Required. Server name for connection. Type: string (or Expression with
@@ -33610,13 +34848,12 @@ class SybaseLinkedService(LinkedService):
 :type schema: object
 :param authentication_type: AuthenticationType to be used for connection. Possible values
 include: "Basic", "Windows".
- :type authentication_type: str or
- ~data_factory_management_client.models.SybaseAuthenticationType
+ :type authentication_type: str or ~azure.mgmt.datafactory.models.SybaseAuthenticationType
 :param username: Username for authentication. Type: string (or Expression with resultType
 string).
 :type username: object
 :param password: Password for authentication.
- :type password: ~data_factory_management_client.models.SecretBase
+ :type password: ~azure.mgmt.datafactory.models.SecretBase
 :param encrypted_credential: The encrypted credential used for authentication. Credentials are
 encrypted using the integration runtime credential manager. Type: string (or Expression with
 resultType string).
@@ -33679,12 +34916,15 @@ class SybaseSource(TabularSource):
 :param max_concurrent_connections: The maximum concurrent connection count for the source data
 store. Type: integer (or Expression with resultType integer).
 :type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
 :param query_timeout: Query timeout. Type: string (or Expression with resultType string),
 pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
 :type query_timeout: object
 :param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
 :param query: Database query. Type: string (or Expression with resultType string).
 :type query: object
 """
@@ -33699,8 +34939,9 @@ class SybaseSource(TabularSource):
 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
 'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
 'query': {'key': 'query', 'type': 'object'},
 }
@@ -33732,14 +34973,14 @@ class SybaseTableDataset(Dataset):
 Expression with resultType array), itemType: DatasetSchemaDataElement.
 :type schema: object
 :param linked_service_name: Required. Linked service reference.
- :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference
+ :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference
 :param parameters: Parameters for dataset.
- :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The Sybase table name. Type: string (or Expression with resultType string). :type table_name: object """ @@ -33806,7 +35047,7 @@ class TabularTranslator(CopyTranslator): activity. Type: boolean (or Expression with resultType boolean). :type type_conversion: object :param type_conversion_settings: Type conversion settings. - :type type_conversion_settings: ~data_factory_management_client.models.TypeConversionSettings + :type type_conversion_settings: ~azure.mgmt.datafactory.models.TypeConversionSettings """ _validation = { @@ -33919,11 +35160,11 @@ class TeradataLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param connection_string: Teradata ODBC connection string. Type: string, SecureString or @@ -33933,13 +35174,12 @@ class TeradataLinkedService(LinkedService): :type server: object :param authentication_type: AuthenticationType to be used for connection. Possible values include: "Basic", "Windows". - :type authentication_type: str or - ~data_factory_management_client.models.TeradataAuthenticationType + :type authentication_type: str or ~azure.mgmt.datafactory.models.TeradataAuthenticationType :param username: Username for authentication. Type: string (or Expression with resultType string). :type username: object :param password: Password for authentication. - :type password: ~data_factory_management_client.models.SecretBase + :type password: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -34030,12 +35270,15 @@ class TeradataSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
 :type query_timeout: object
 :param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
 :param query: Teradata query. Type: string (or Expression with resultType string).
 :type query: object
 :param partition_option: The partition mechanism that will be used for teradata read in
@@ -34043,7 +35286,7 @@ class TeradataSource(TabularSource):
 :type partition_option: object
 :param partition_settings: The settings that will be leveraged for teradata source
 partitioning.
- :type partition_settings: ~data_factory_management_client.models.TeradataPartitionSettings
+ :type partition_settings: ~azure.mgmt.datafactory.models.TeradataPartitionSettings
 """

 _validation = {
@@ -34056,8 +35299,9 @@ class TeradataSource(TabularSource):
 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
 'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
 'query': {'key': 'query', 'type': 'object'},
 'partition_option': {'key': 'partitionOption', 'type': 'object'},
 'partition_settings': {'key': 'partitionSettings', 'type': 'TeradataPartitionSettings'},
@@ -34093,14 +35337,14 @@ class TeradataTableDataset(Dataset):
 Expression with resultType array), itemType: DatasetSchemaDataElement.
 :type schema: object
 :param linked_service_name: Required. Linked service reference.
- :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference
+ :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference
 :param parameters: Parameters for dataset.
- :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification]
+ :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification]
 :param annotations: List of tags that can be used for describing the Dataset.
 :type annotations: list[object]
 :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the
 root level.
- :type folder: ~data_factory_management_client.models.DatasetFolder
+ :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
 :param database: The database name of Teradata. Type: string (or Expression with resultType
 string).
 :type database: object
@@ -34228,7 +35472,7 @@ class TriggerDependencyReference(DependencyReference):
 :param type: Required. The type of dependency reference.Constant filled by server.
 :type type: str
 :param reference_trigger: Required. Referenced trigger.
- :type reference_trigger: ~data_factory_management_client.models.TriggerReference
+ :type reference_trigger: ~azure.mgmt.datafactory.models.TriggerReference
 """

 _validation = {
@@ -34285,7 +35529,7 @@ class TriggerListResponse(msrest.serialization.Model):
 All required parameters must be populated in order to send to Azure.

 :param value: Required. List of triggers.
- :type value: list[~data_factory_management_client.models.TriggerResource] + :type value: list[~azure.mgmt.datafactory.models.TriggerResource] :param next_link: The link to the next page of results, if any remaining results exist. :type next_link: str """ @@ -34312,7 +35556,7 @@ class TriggerPipelineReference(msrest.serialization.Model): """Pipeline that needs to be triggered with the given parameters. :param pipeline_reference: Pipeline reference. - :type pipeline_reference: ~data_factory_management_client.models.PipelineReference + :type pipeline_reference: ~azure.mgmt.datafactory.models.PipelineReference :param parameters: Pipeline parameters. :type parameters: dict[str, object] """ @@ -34337,7 +35581,7 @@ class TriggerQueryResponse(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. :param value: Required. List of triggers. - :type value: list[~data_factory_management_client.models.TriggerResource] + :type value: list[~azure.mgmt.datafactory.models.TriggerResource] :param continuation_token: The continuation token for getting the next page of results, if any remaining results exist, null otherwise. :type continuation_token: str @@ -34410,7 +35654,7 @@ class TriggerResource(SubResource): :ivar etag: Etag identifies change in the resource. :vartype etag: str :param properties: Required. Properties of the trigger. - :type properties: ~data_factory_management_client.models.Trigger + :type properties: ~azure.mgmt.datafactory.models.Trigger """ _validation = { @@ -34454,7 +35698,7 @@ class TriggerRun(msrest.serialization.Model): :ivar trigger_run_timestamp: Trigger run start time. :vartype trigger_run_timestamp: ~datetime.datetime :ivar status: Trigger run status. Possible values include: "Succeeded", "Failed", "Inprogress". - :vartype status: str or ~data_factory_management_client.models.TriggerRunStatus + :vartype status: str or ~azure.mgmt.datafactory.models.TriggerRunStatus :ivar message: Trigger error message. :vartype message: str :ivar properties: List of property name and value related to trigger run. Name, value pair @@ -34519,7 +35763,7 @@ class TriggerRunsQueryResponse(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. :param value: Required. List of trigger runs. - :type value: list[~data_factory_management_client.models.TriggerRun] + :type value: list[~azure.mgmt.datafactory.models.TriggerRun] :param continuation_token: The continuation token for getting the next page of results, if any remaining results exist, null otherwise. :type continuation_token: str @@ -34552,7 +35796,7 @@ class TriggerSubscriptionOperationStatus(msrest.serialization.Model): :vartype trigger_name: str :ivar status: Event Subscription Status. Possible values include: "Enabled", "Provisioning", "Deprovisioning", "Disabled", "Unknown". - :vartype status: str or ~data_factory_management_client.models.EventSubscriptionStatus + :vartype status: str or ~azure.mgmt.datafactory.models.EventSubscriptionStatus """ _validation = { @@ -34590,15 +35834,15 @@ class TumblingWindowTrigger(Trigger): :type description: str :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". 
- :vartype runtime_state: str or ~data_factory_management_client.models.TriggerRuntimeState + :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState :param annotations: List of tags that can be used for describing the trigger. :type annotations: list[object] :param pipeline: Required. Pipeline for which runs are created when an event is fired for trigger window that is ready. - :type pipeline: ~data_factory_management_client.models.TriggerPipelineReference + :type pipeline: ~azure.mgmt.datafactory.models.TriggerPipelineReference :param frequency: Required. The frequency of the time windows. Possible values include: "Minute", "Hour", "Month". - :type frequency: str or ~data_factory_management_client.models.TumblingWindowFrequency + :type frequency: str or ~azure.mgmt.datafactory.models.TumblingWindowFrequency :param interval: Required. The interval of the time windows. The minimum interval allowed is 15 Minutes. :type interval: int @@ -34616,10 +35860,10 @@ class TumblingWindowTrigger(Trigger): for which a new run is triggered. :type max_concurrency: int :param retry_policy: Retry policy that will be applied for failed pipeline runs. - :type retry_policy: ~data_factory_management_client.models.RetryPolicy + :type retry_policy: ~azure.mgmt.datafactory.models.RetryPolicy :param depends_on: Triggers that this trigger depends on. Only tumbling window triggers are supported. - :type depends_on: list[~data_factory_management_client.models.DependencyReference] + :type depends_on: list[~azure.mgmt.datafactory.models.DependencyReference] """ _validation = { @@ -34674,7 +35918,7 @@ class TumblingWindowTriggerDependencyReference(TriggerDependencyReference): :param type: Required. The type of dependency reference.Constant filled by server. :type type: str :param reference_trigger: Required. Referenced trigger. - :type reference_trigger: ~data_factory_management_client.models.TriggerReference + :type reference_trigger: ~azure.mgmt.datafactory.models.TriggerReference :param offset: Timespan applied to the start time of a tumbling window when evaluating dependency. :type offset: str @@ -34767,12 +36011,12 @@ class UntilActivity(Activity): :param description: Activity description. :type description: str :param depends_on: Activity depends on condition. - :type depends_on: list[~data_factory_management_client.models.ActivityDependency] + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. - :type user_properties: list[~data_factory_management_client.models.UserProperty] + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] :param expression: Required. An expression that would evaluate to Boolean. The loop will continue until this expression evaluates to true. - :type expression: ~data_factory_management_client.models.Expression + :type expression: ~azure.mgmt.datafactory.models.Expression :param timeout: Specifies the timeout for the activity to run. If there is no value specified, it takes the value of TimeSpan.FromDays(7) which is 1 week as default. Type: string (or Expression with resultType string), pattern: @@ -34780,7 +36024,7 @@ class UntilActivity(Activity): resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type timeout: object :param activities: Required. List of activities to execute. 
- :type activities: list[~data_factory_management_client.models.Activity] + :type activities: list[~azure.mgmt.datafactory.models.Activity] """ _validation = { @@ -34843,7 +36087,7 @@ class UpdateIntegrationRuntimeRequest(msrest.serialization.Model): :param auto_update: Enables or disables the auto-update feature of the self-hosted integration runtime. See https://go.microsoft.com/fwlink/?linkid=854189. Possible values include: "On", "Off". - :type auto_update: str or ~data_factory_management_client.models.IntegrationRuntimeAutoUpdate + :type auto_update: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeAutoUpdate :param update_delay_offset: The time offset (in hours) in the day, e.g., PT03H is 3 hours. The integration runtime auto update will happen on that time. :type update_delay_offset: str @@ -34948,9 +36192,9 @@ class ValidationActivity(Activity): :param description: Activity description. :type description: str :param depends_on: Activity depends on condition. - :type depends_on: list[~data_factory_management_client.models.ActivityDependency] + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. - :type user_properties: list[~data_factory_management_client.models.UserProperty] + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] :param timeout: Specifies the timeout for the activity to run. If there is no value specified, it takes the value of TimeSpan.FromDays(7) which is 1 week as default. Type: string (or Expression with resultType string), pattern: @@ -34967,7 +36211,7 @@ class ValidationActivity(Activity): with resultType boolean). :type child_items: object :param dataset: Required. Validation activity dataset reference. - :type dataset: ~data_factory_management_client.models.DatasetReference + :type dataset: ~azure.mgmt.datafactory.models.DatasetReference """ _validation = { @@ -35009,7 +36253,7 @@ class VariableSpecification(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. :param type: Required. Variable type. Possible values include: "String", "Bool", "Array". - :type type: str or ~data_factory_management_client.models.VariableType + :type type: str or ~azure.mgmt.datafactory.models.VariableType :param default_value: Default value of variable. :type default_value: object """ @@ -35043,18 +36287,18 @@ class VerticaLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param connection_string: An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. :type connection_string: object :param pwd: The Azure key vault secret reference of password in connection string. 
- :type pwd: ~data_factory_management_client.models.AzureKeyVaultSecretReference
+ :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference
 :param encrypted_credential: The encrypted credential used for authentication. Credentials are
 encrypted using the integration runtime credential manager. Type: string (or Expression with
 resultType string).
@@ -35107,12 +36351,15 @@ class VerticaSource(TabularSource):
 :param max_concurrent_connections: The maximum concurrent connection count for the source data
 store. Type: integer (or Expression with resultType integer).
 :type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
 :param query_timeout: Query timeout. Type: string (or Expression with resultType string),
 pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
 :type query_timeout: object
 :param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
 :param query: A query to retrieve data from source. Type: string (or Expression with
 resultType string).
 :type query: object
@@ -35128,8 +36375,9 @@ class VerticaSource(TabularSource):
 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
 'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
 'query': {'key': 'query', 'type': 'object'},
 }
@@ -35161,14 +36409,14 @@ class VerticaTableDataset(Dataset):
 Expression with resultType array), itemType: DatasetSchemaDataElement.
 :type schema: object
 :param linked_service_name: Required. Linked service reference.
- :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference
+ :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference
 :param parameters: Parameters for dataset.
- :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification]
+ :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification]
 :param annotations: List of tags that can be used for describing the Dataset.
 :type annotations: list[object]
 :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the
 root level.
- :type folder: ~data_factory_management_client.models.DatasetFolder
+ :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
 :param table_name: This property will be retired. Please consider using schema + table
 properties instead.
 :type table_name: object
@@ -35226,9 +36474,9 @@ class WaitActivity(Activity):
 :param description: Activity description.
 :type description: str
 :param depends_on: Activity depends on condition.
- :type depends_on: list[~data_factory_management_client.models.ActivityDependency] + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. - :type user_properties: list[~data_factory_management_client.models.UserProperty] + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] :param wait_time_in_seconds: Required. Duration in seconds. :type wait_time_in_seconds: object """ @@ -35273,16 +36521,16 @@ class WebActivity(ExecutionActivity): :param description: Activity description. :type description: str :param depends_on: Activity depends on condition. - :type depends_on: list[~data_factory_management_client.models.ActivityDependency] + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. - :type user_properties: list[~data_factory_management_client.models.UserProperty] + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] :param linked_service_name: Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param policy: Activity policy. - :type policy: ~data_factory_management_client.models.ActivityPolicy + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy :param method: Required. Rest API method for target endpoint. Possible values include: "GET", "POST", "PUT", "DELETE". - :type method: str or ~data_factory_management_client.models.WebActivityMethod + :type method: str or ~azure.mgmt.datafactory.models.WebActivityMethod :param url: Required. Web activity target endpoint and path. Type: string (or Expression with resultType string). :type url: object @@ -35294,13 +36542,13 @@ class WebActivity(ExecutionActivity): method, not allowed for GET method Type: string (or Expression with resultType string). :type body: object :param authentication: Authentication method used for calling the endpoint. - :type authentication: ~data_factory_management_client.models.WebActivityAuthentication + :type authentication: ~azure.mgmt.datafactory.models.WebActivityAuthentication :param datasets: List of datasets passed to web endpoint. - :type datasets: list[~data_factory_management_client.models.DatasetReference] + :type datasets: list[~azure.mgmt.datafactory.models.DatasetReference] :param linked_services: List of linked services passed to web endpoint. - :type linked_services: list[~data_factory_management_client.models.LinkedServiceReference] + :type linked_services: list[~azure.mgmt.datafactory.models.LinkedServiceReference] :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference """ _validation = { @@ -35348,32 +36596,27 @@ def __init__( class WebActivityAuthentication(msrest.serialization.Model): """Web activity authentication properties. - All required parameters must be populated in order to send to Azure. - - :param type: Required. Web activity authentication - (Basic/ClientCertificate/MSI/ServicePrincipal). + :param type: Web activity authentication (Basic/ClientCertificate/MSI/ServicePrincipal). :type type: str :param pfx: Base64-encoded contents of a PFX file or Certificate when used for ServicePrincipal. 
- :type pfx: ~data_factory_management_client.models.SecretBase + :type pfx: ~azure.mgmt.datafactory.models.SecretBase :param username: Web activity authentication user name for basic authentication or ClientID when used for ServicePrincipal. Type: string (or Expression with resultType string). :type username: object :param password: Password for the PFX file or basic authentication / Secret when used for ServicePrincipal. - :type password: ~data_factory_management_client.models.SecretBase + :type password: ~azure.mgmt.datafactory.models.SecretBase :param resource: Resource for which Azure Auth token will be requested when using MSI Authentication. Type: string (or Expression with resultType string). :type resource: object :param user_tenant: TenantId for which Azure Auth token will be requested when using ServicePrincipal Authentication. Type: string (or Expression with resultType string). :type user_tenant: object + :param credential: The credential reference containing authentication information. + :type credential: ~azure.mgmt.datafactory.models.CredentialReference """ - _validation = { - 'type': {'required': True}, - } - _attribute_map = { 'type': {'key': 'type', 'type': 'str'}, 'pfx': {'key': 'pfx', 'type': 'SecretBase'}, @@ -35381,6 +36624,7 @@ class WebActivityAuthentication(msrest.serialization.Model): 'password': {'key': 'password', 'type': 'SecretBase'}, 'resource': {'key': 'resource', 'type': 'object'}, 'user_tenant': {'key': 'userTenant', 'type': 'object'}, + 'credential': {'key': 'credential', 'type': 'CredentialReference'}, } def __init__( @@ -35388,12 +36632,13 @@ def __init__( **kwargs ): super(WebActivityAuthentication, self).__init__(**kwargs) - self.type = kwargs['type'] + self.type = kwargs.get('type', None) self.pfx = kwargs.get('pfx', None) self.username = kwargs.get('username', None) self.password = kwargs.get('password', None) self.resource = kwargs.get('resource', None) self.user_tenant = kwargs.get('user_tenant', None) + self.credential = kwargs.get('credential', None) class WebLinkedServiceTypeProperties(msrest.serialization.Model): @@ -35410,7 +36655,7 @@ class WebLinkedServiceTypeProperties(msrest.serialization.Model): :param authentication_type: Required. Type of authentication used to connect to the web table source.Constant filled by server. Possible values include: "Basic", "Anonymous", "ClientCertificate". - :type authentication_type: str or ~data_factory_management_client.models.WebAuthenticationType + :type authentication_type: str or ~azure.mgmt.datafactory.models.WebAuthenticationType """ _validation = { @@ -35447,7 +36692,7 @@ class WebAnonymousAuthentication(WebLinkedServiceTypeProperties): :param authentication_type: Required. Type of authentication used to connect to the web table source.Constant filled by server. Possible values include: "Basic", "Anonymous", "ClientCertificate". - :type authentication_type: str or ~data_factory_management_client.models.WebAuthenticationType + :type authentication_type: str or ~azure.mgmt.datafactory.models.WebAuthenticationType """ _validation = { @@ -35479,12 +36724,12 @@ class WebBasicAuthentication(WebLinkedServiceTypeProperties): :param authentication_type: Required. Type of authentication used to connect to the web table source.Constant filled by server. Possible values include: "Basic", "Anonymous", "ClientCertificate". 
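A minimal usage sketch of the loosened WebActivityAuthentication contract, assuming the vendored models mirror azure.mgmt.datafactory.models, that "my-managed-identity" is a hypothetical factory credential name, and that CredentialReference takes only reference_name (its type being a server-filled constant):

from azure.mgmt.datafactory.models import (
    CredentialReference,
    WebActivityAuthentication,
)

# 'type' is no longer validated as required on the client, and the new
# 'credential' field can reference a factory credential instead of carrying
# inline secrets such as pfx/password.
auth = WebActivityAuthentication(
    type="MSI",
    resource="https://management.azure.com/",
    credential=CredentialReference(reference_name="my-managed-identity"),
)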
- :type authentication_type: str or ~data_factory_management_client.models.WebAuthenticationType + :type authentication_type: str or ~azure.mgmt.datafactory.models.WebAuthenticationType :param username: Required. User name for Basic authentication. Type: string (or Expression with resultType string). :type username: object :param password: Required. The password for Basic authentication. - :type password: ~data_factory_management_client.models.SecretBase + :type password: ~azure.mgmt.datafactory.models.SecretBase """ _validation = { @@ -35522,11 +36767,11 @@ class WebClientCertificateAuthentication(WebLinkedServiceTypeProperties): :param authentication_type: Required. Type of authentication used to connect to the web table source.Constant filled by server. Possible values include: "Basic", "Anonymous", "ClientCertificate". - :type authentication_type: str or ~data_factory_management_client.models.WebAuthenticationType + :type authentication_type: str or ~azure.mgmt.datafactory.models.WebAuthenticationType :param pfx: Required. Base64-encoded contents of a PFX file. - :type pfx: ~data_factory_management_client.models.SecretBase + :type pfx: ~azure.mgmt.datafactory.models.SecretBase :param password: Required. Password for the PFX file. - :type password: ~data_factory_management_client.models.SecretBase + :type password: ~azure.mgmt.datafactory.models.SecretBase """ _validation = { @@ -35568,11 +36813,11 @@ class WebHookActivity(Activity): :param description: Activity description. :type description: str :param depends_on: Activity depends on condition. - :type depends_on: list[~data_factory_management_client.models.ActivityDependency] + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. - :type user_properties: list[~data_factory_management_client.models.UserProperty] + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] :param method: Required. Rest API method for target endpoint. Possible values include: "POST". - :type method: str or ~data_factory_management_client.models.WebHookActivityMethod + :type method: str or ~azure.mgmt.datafactory.models.WebHookActivityMethod :param url: Required. WebHook activity target endpoint and path. Type: string (or Expression with resultType string). :type url: object @@ -35588,7 +36833,7 @@ class WebHookActivity(Activity): method, not allowed for GET method Type: string (or Expression with resultType string). :type body: object :param authentication: Authentication method used for calling the endpoint. - :type authentication: ~data_factory_management_client.models.WebActivityAuthentication + :type authentication: ~azure.mgmt.datafactory.models.WebActivityAuthentication :param report_status_on_call_back: When set to true, statusCode, output and error in callback request body will be consumed by activity. The activity can be marked as failed by setting statusCode >= 400 in callback request. Default is false. Type: boolean (or Expression with @@ -35645,15 +36890,15 @@ class WebLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. 
- :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param type_properties: Required. Web linked service properties. - :type type_properties: ~data_factory_management_client.models.WebLinkedServiceTypeProperties + :type type_properties: ~azure.mgmt.datafactory.models.WebLinkedServiceTypeProperties """ _validation = { @@ -35699,9 +36944,12 @@ class WebSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object """ _validation = { @@ -35714,7 +36962,8 @@ class WebSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -35745,14 +36994,14 @@ class WebTableDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param index: Required. The zero-based index of the table in the web page. Type: integer (or Expression with resultType integer), minimum: 0. :type index: object @@ -35802,11 +37051,11 @@ class XeroLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. 
:type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param connection_properties: Properties used to connect to Xero. It is mutually exclusive with @@ -35815,11 +37064,11 @@ class XeroLinkedService(LinkedService): :param host: The endpoint of the Xero server. (i.e. api.xero.com). :type host: object :param consumer_key: The consumer key associated with the Xero application. - :type consumer_key: ~data_factory_management_client.models.SecretBase + :type consumer_key: ~azure.mgmt.datafactory.models.SecretBase :param private_key: The private key from the .pem file that was generated for your Xero private application. You must include all the text from the .pem file, including the Unix line endings( ). - :type private_key: ~data_factory_management_client.models.SecretBase + :type private_key: ~azure.mgmt.datafactory.models.SecretBase :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. :type use_encrypted_endpoints: object @@ -35892,14 +37141,14 @@ class XeroObjectDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The table name. Type: string (or Expression with resultType string). :type table_name: object """ @@ -35950,12 +37199,15 @@ class XeroSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
:type query: object @@ -35971,8 +37223,9 @@ class XeroSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -36004,16 +37257,16 @@ class XmlDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param location: The location of the json data storage. - :type location: ~data_factory_management_client.models.DatasetLocation + :type location: ~azure.mgmt.datafactory.models.DatasetLocation :param encoding_name: The code page name of the preferred encoding. If not specified, the default value is UTF-8, unless BOM denotes another Unicode encoding. Refer to the name column of the table in the following link to set supported values: @@ -36023,7 +37276,7 @@ class XmlDataset(Dataset): :param null_value: The null value string. Type: string (or Expression with resultType string). :type null_value: object :param compression: The data compression method used for the json dataset. - :type compression: ~data_factory_management_client.models.DatasetCompression + :type compression: ~azure.mgmt.datafactory.models.DatasetCompression """ _validation = { @@ -36070,7 +37323,7 @@ class XmlReadSettings(FormatReadSettings): :param type: Required. The read setting type.Constant filled by server. :type type: str :param compression_properties: Compression settings. - :type compression_properties: ~data_factory_management_client.models.CompressionReadSettings + :type compression_properties: ~azure.mgmt.datafactory.models.CompressionReadSettings :param validation_mode: Indicates what validation method is used when reading the xml files. Allowed values: 'none', 'xsd', or 'dtd'. Type: string (or Expression with resultType string). :type validation_mode: object @@ -36133,13 +37386,16 @@ class XmlSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). 
+ :type disable_metrics_collection: object :param store_settings: Xml store settings. - :type store_settings: ~data_factory_management_client.models.StoreReadSettings + :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings :param format_settings: Xml format settings. - :type format_settings: ~data_factory_management_client.models.XmlReadSettings + :type format_settings: ~azure.mgmt.datafactory.models.XmlReadSettings :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object """ _validation = { @@ -36152,9 +37408,10 @@ class XmlSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, 'format_settings': {'key': 'formatSettings', 'type': 'XmlReadSettings'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -36213,11 +37470,11 @@ class ZohoLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param connection_properties: Properties used to connect to Zoho. It is mutually exclusive with @@ -36226,7 +37483,7 @@ class ZohoLinkedService(LinkedService): :param endpoint: The endpoint of the Zoho server. (i.e. crm.zoho.com/crm/private). :type endpoint: object :param access_token: The access token for Zoho authentication. - :type access_token: ~data_factory_management_client.models.SecretBase + :type access_token: ~azure.mgmt.datafactory.models.SecretBase :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. :type use_encrypted_endpoints: object @@ -36297,14 +37554,14 @@ class ZohoObjectDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. 
- :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The table name. Type: string (or Expression with resultType string). :type table_name: object """ @@ -36355,12 +37612,15 @@ class ZohoSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -36376,8 +37636,9 @@ class ZohoSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models_py3.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models_py3.py index f6ebc8328ae..0bf0d9eaa2d 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models_py3.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models_py3.py @@ -18,7 +18,7 @@ class AccessPolicyResponse(msrest.serialization.Model): """Get Data Plane read only token response definition. :param policy: The user access policy. - :type policy: ~data_factory_management_client.models.UserAccessPolicy + :type policy: ~azure.mgmt.datafactory.models.UserAccessPolicy :param access_token: Data Plane read only access token. :type access_token: str :param data_plane_url: Data Plane service base URL. @@ -63,9 +63,9 @@ class Activity(msrest.serialization.Model): :param description: Activity description. :type description: str :param depends_on: Activity depends on condition. 
- :type depends_on: list[~data_factory_management_client.models.ActivityDependency] + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. - :type user_properties: list[~data_factory_management_client.models.UserProperty] + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] """ _validation = { @@ -116,8 +116,7 @@ class ActivityDependency(msrest.serialization.Model): :param activity: Required. Activity name. :type activity: str :param dependency_conditions: Required. Match-Condition for the dependency. - :type dependency_conditions: list[str or - ~data_factory_management_client.models.DependencyCondition] + :type dependency_conditions: list[str or ~azure.mgmt.datafactory.models.DependencyCondition] """ _validation = { @@ -300,7 +299,7 @@ class ActivityRunsQueryResponse(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. :param value: Required. List of activity runs. - :type value: list[~data_factory_management_client.models.ActivityRun] + :type value: list[~azure.mgmt.datafactory.models.ActivityRun] :param continuation_token: The continuation token for getting the next page of results, if any remaining results exist, null otherwise. :type continuation_token: str @@ -388,11 +387,11 @@ class LinkedService(msrest.serialization.Model): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] """ @@ -444,11 +443,11 @@ class AmazonMwsLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param endpoint: Required. The endpoint of the Amazon MWS server, (i.e. @@ -461,11 +460,11 @@ class AmazonMwsLinkedService(LinkedService): :param seller_id: Required. The Amazon seller ID. :type seller_id: object :param mws_auth_token: The Amazon MWS authentication token. - :type mws_auth_token: ~data_factory_management_client.models.SecretBase + :type mws_auth_token: ~azure.mgmt.datafactory.models.SecretBase :param access_key_id: Required. The access key id used to access data. :type access_key_id: object :param secret_key: The secret key used to access data. 
- :type secret_key: ~data_factory_management_client.models.SecretBase + :type secret_key: ~azure.mgmt.datafactory.models.SecretBase :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. :type use_encrypted_endpoints: object @@ -565,14 +564,14 @@ class Dataset(msrest.serialization.Model): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder """ _validation = { @@ -640,14 +639,14 @@ class AmazonMwsObjectDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The table name. Type: string (or Expression with resultType string). :type table_name: object """ @@ -711,6 +710,9 @@ class CopySource(msrest.serialization.Model): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). 
+     false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object """ _validation = { @@ -723,6 +725,7 @@ class CopySource(msrest.serialization.Model): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, } _subtype_map = { @@ -736,6 +739,7 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, **kwargs ): super(CopySource, self).__init__(**kwargs) @@ -744,6 +748,7 @@ def __init__( self.source_retry_count = source_retry_count self.source_retry_wait = source_retry_wait self.max_concurrent_connections = max_concurrent_connections + self.disable_metrics_collection = disable_metrics_collection class TabularSource(CopySource): @@ -768,12 +773,15 @@ class TabularSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
+ :type additional_columns: object """ _validation = { @@ -786,8 +794,9 @@ class TabularSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } _subtype_map = { @@ -801,11 +810,12 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, **kwargs ): - super(TabularSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(TabularSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'TabularSource' # type: str self.query_timeout = query_timeout self.additional_columns = additional_columns @@ -830,12 +840,15 @@ class AmazonMwsSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
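A short sketch of how the two new knobs surface on a concrete tabular source, assuming these vendored classes behave like their azure.mgmt.datafactory.models counterparts:

from azure.mgmt.datafactory.models import AmazonMwsSource

# Both properties are typed as plain objects: each accepts a literal value or
# an ADF expression, and additional_columns now passes AdditionalColumns-shaped
# dicts straight through instead of requiring a typed [AdditionalColumns] list.
source = AmazonMwsSource(
    query="SELECT * FROM Orders",
    disable_metrics_collection=True,
    additional_columns=[{"name": "ingest_time", "value": "@utcnow()"}],
)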
:type query: object @@ -851,8 +864,9 @@ class AmazonMwsSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -863,12 +877,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, query: Optional[object] = None, **kwargs ): - super(AmazonMwsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(AmazonMwsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'AmazonMWSSource' # type: str self.query = query @@ -884,11 +899,11 @@ class AmazonRedshiftLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param server: Required. The name of the Amazon Redshift server. Type: string (or Expression @@ -898,7 +913,7 @@ class AmazonRedshiftLinkedService(LinkedService): resultType string). :type username: object :param password: The password of the Amazon Redshift source. - :type password: ~data_factory_management_client.models.SecretBase + :type password: ~azure.mgmt.datafactory.models.SecretBase :param database: Required. The database name of the Amazon Redshift source. Type: string (or Expression with resultType string). :type database: object @@ -977,18 +992,21 @@ class AmazonRedshiftSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). 
+     false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: Database query. Type: string (or Expression with resultType string). :type query: object :param redshift_unload_settings: The Amazon S3 settings needed for the interim Amazon S3 when copying from Amazon Redshift with unload. With this, data from Amazon Redshift source will be unloaded into S3 first and then copied into the targeted sink from the interim S3. - :type redshift_unload_settings: ~data_factory_management_client.models.RedshiftUnloadSettings + :type redshift_unload_settings: ~azure.mgmt.datafactory.models.RedshiftUnloadSettings """ _validation = { @@ -1001,8 +1019,9 @@ class AmazonRedshiftSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'redshift_unload_settings': {'key': 'redshiftUnloadSettings', 'type': 'RedshiftUnloadSettings'}, } @@ -1014,13 +1033,14 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, query: Optional[object] = None, redshift_unload_settings: Optional["RedshiftUnloadSettings"] = None, **kwargs ): - super(AmazonRedshiftSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(AmazonRedshiftSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'AmazonRedshiftSource' # type: str self.query = query self.redshift_unload_settings = redshift_unload_settings @@ -1045,14 +1065,14 @@ class AmazonRedshiftTableDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. 
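For the Redshift source, the interim-S3 unload flow described in the docstring wires up roughly like this (a sketch; both reference names are hypothetical, and LinkedServiceReference is assumed to take only reference_name):

from azure.mgmt.datafactory.models import (
    AmazonRedshiftSource,
    LinkedServiceReference,
    RedshiftUnloadSettings,
)

# Redshift UNLOADs into the interim S3 bucket first; the copy activity then
# reads from S3 into the targeted sink.
source = AmazonRedshiftSource(
    query="select * from public.events",
    redshift_unload_settings=RedshiftUnloadSettings(
        s3_linked_service_name=LinkedServiceReference(reference_name="InterimS3"),
        bucket_name="adf-interim-unload",
    ),
)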
- :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: This property will be retired. Please consider using schema + table properties instead. :type table_name: object @@ -1118,11 +1138,11 @@ class AmazonS3CompatibleLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param access_key_id: The access key identifier of the Amazon S3 Compatible Identity and Access @@ -1130,7 +1150,7 @@ class AmazonS3CompatibleLinkedService(LinkedService): :type access_key_id: object :param secret_access_key: The secret access key of the Amazon S3 Compatible Identity and Access Management (IAM) user. - :type secret_access_key: ~data_factory_management_client.models.SecretBase + :type secret_access_key: ~azure.mgmt.datafactory.models.SecretBase :param service_url: This value specifies the endpoint to access with the Amazon S3 Compatible Connector. This is an optional property; change it only if you want to try a different service endpoint or want to switch between https and http. Type: string (or Expression with resultType @@ -1307,6 +1327,9 @@ class StoreReadSettings(msrest.serialization.Model): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). 
+     false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object """ _validation = { @@ -1317,6 +1340,7 @@ class StoreReadSettings(msrest.serialization.Model): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, } _subtype_map = { @@ -1328,12 +1352,14 @@ def __init__( *, additional_properties: Optional[Dict[str, object]] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, **kwargs ): super(StoreReadSettings, self).__init__(**kwargs) self.additional_properties = additional_properties self.type = 'StoreReadSettings' # type: str self.max_concurrent_connections = max_concurrent_connections + self.disable_metrics_collection = disable_metrics_collection class AmazonS3CompatibleReadSettings(StoreReadSettings): @@ -1349,6 +1375,9 @@ class AmazonS3CompatibleReadSettings(StoreReadSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). :type recursive: object @@ -1389,6 +1418,7 @@ class AmazonS3CompatibleReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -1406,6 +1436,7 @@ def __init__( *, additional_properties: Optional[Dict[str, object]] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, recursive: Optional[object] = None, wildcard_folder_path: Optional[object] = None, wildcard_file_name: Optional[object] = None, @@ -1418,7 +1449,7 @@ def __init__( modified_datetime_end: Optional[object] = None, **kwargs ): - super(AmazonS3CompatibleReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AmazonS3CompatibleReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AmazonS3CompatibleReadSettings' # type: str self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path @@ -1451,14 +1482,14 @@ class AmazonS3Dataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. 
- :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param bucket_name: Required. The name of the Amazon S3 bucket. Type: string (or Expression with resultType string). :type bucket_name: object @@ -1478,9 +1509,9 @@ class AmazonS3Dataset(Dataset): Expression with resultType string). :type modified_datetime_end: object :param format: The format of files. - :type format: ~data_factory_management_client.models.DatasetStorageFormat + :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat :param compression: The data compression method used for the Amazon S3 object. - :type compression: ~data_factory_management_client.models.DatasetCompression + :type compression: ~azure.mgmt.datafactory.models.DatasetCompression """ _validation = { @@ -1553,11 +1584,11 @@ class AmazonS3LinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param authentication_type: The authentication type of S3. Allowed value: AccessKey (default) @@ -1568,13 +1599,13 @@ class AmazonS3LinkedService(LinkedService): :type access_key_id: object :param secret_access_key: The secret access key of the Amazon S3 Identity and Access Management (IAM) user. - :type secret_access_key: ~data_factory_management_client.models.SecretBase + :type secret_access_key: ~azure.mgmt.datafactory.models.SecretBase :param service_url: This value specifies the endpoint to access with the S3 Connector. This is an optional property; change it only if you want to try a different service endpoint or want to switch between https and http. Type: string (or Expression with resultType string). :type service_url: object :param session_token: The session token for the S3 temporary security credential. - :type session_token: ~data_factory_management_client.models.SecretBase + :type session_token: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -1692,6 +1723,9 @@ class AmazonS3ReadSettings(StoreReadSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. 
Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). :type recursive: object @@ -1732,6 +1766,7 @@ class AmazonS3ReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -1749,6 +1784,7 @@ def __init__( *, additional_properties: Optional[Dict[str, object]] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, recursive: Optional[object] = None, wildcard_folder_path: Optional[object] = None, wildcard_file_name: Optional[object] = None, @@ -1761,7 +1797,7 @@ def __init__( modified_datetime_end: Optional[object] = None, **kwargs ): - super(AmazonS3ReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AmazonS3ReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AmazonS3ReadSettings' # type: str self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path @@ -1790,9 +1826,9 @@ class AppendVariableActivity(Activity): :param description: Activity description. :type description: str :param depends_on: Activity depends on condition. - :type depends_on: list[~data_factory_management_client.models.ActivityDependency] + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. - :type user_properties: list[~data_factory_management_client.models.UserProperty] + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] :param variable_name: Name of the variable whose value needs to be appended to. :type variable_name: str :param value: Value to be appended. Could be a static value or Expression. @@ -1877,20 +1913,19 @@ class AvroDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param location: The location of the avro storage. 
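The same flag lands on store read settings as well; a minimal sketch against the assumed azure.mgmt.datafactory model shape:

from azure.mgmt.datafactory.models import AmazonS3ReadSettings

# disable_metrics_collection rides alongside the existing recursive/wildcard
# options; as an object it may also hold an ADF expression.
read_settings = AmazonS3ReadSettings(
    recursive=True,
    wildcard_file_name="*.csv",
    disable_metrics_collection=True,
)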
- :type location: ~data_factory_management_client.models.DatasetLocation - :param avro_compression_codec: Possible values include: "none", "deflate", "snappy", "xz", - "bzip2". - :type avro_compression_codec: str or - ~data_factory_management_client.models.AvroCompressionCodec + :type location: ~azure.mgmt.datafactory.models.DatasetLocation + :param avro_compression_codec: The data avroCompressionCodec. Type: string (or Expression with + resultType string). + :type avro_compression_codec: object :param avro_compression_level: :type avro_compression_level: int """ @@ -1912,7 +1947,7 @@ class AvroDataset(Dataset): 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, - 'avro_compression_codec': {'key': 'typeProperties.avroCompressionCodec', 'type': 'str'}, + 'avro_compression_codec': {'key': 'typeProperties.avroCompressionCodec', 'type': 'object'}, 'avro_compression_level': {'key': 'typeProperties.avroCompressionLevel', 'type': 'int'}, } @@ -1928,7 +1963,7 @@ def __init__( annotations: Optional[List[object]] = None, folder: Optional["DatasetFolder"] = None, location: Optional["DatasetLocation"] = None, - avro_compression_codec: Optional[Union[str, "AvroCompressionCodec"]] = None, + avro_compression_codec: Optional[object] = None, avro_compression_level: Optional[int] = None, **kwargs ): @@ -2031,7 +2066,7 @@ class CopySink(msrest.serialization.Model): """A copy activity sink. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AvroSink, AzureBlobFsSink, AzureDataExplorerSink, AzureDataLakeStoreSink, AzureDatabricksDeltaLakeSink, AzureMySqlSink, AzurePostgreSqlSink, AzureQueueSink, AzureSearchIndexSink, AzureSqlSink, AzureTableSink, BinarySink, BlobSink, CommonDataServiceForAppsSink, CosmosDbMongoDbApiSink, CosmosDbSqlApiSink, DelimitedTextSink, DocumentDbCollectionSink, DynamicsCrmSink, DynamicsSink, FileSystemSink, InformixSink, JsonSink, MicrosoftAccessSink, OdbcSink, OracleSink, OrcSink, ParquetSink, RestSink, SalesforceServiceCloudSink, SalesforceSink, SapCloudForCustomerSink, SnowflakeSink, SqlDwSink, SqlMiSink, SqlServerSink, SqlSink. + sub-classes are: AvroSink, AzureBlobFsSink, AzureDataExplorerSink, AzureDataLakeStoreSink, AzureDatabricksDeltaLakeSink, AzureMySqlSink, AzurePostgreSqlSink, AzureQueueSink, AzureSearchIndexSink, AzureSqlSink, AzureTableSink, BinarySink, BlobSink, CommonDataServiceForAppsSink, CosmosDbMongoDbApiSink, CosmosDbSqlApiSink, DelimitedTextSink, DocumentDbCollectionSink, DynamicsCrmSink, DynamicsSink, FileSystemSink, InformixSink, JsonSink, MicrosoftAccessSink, MongoDbAtlasSink, MongoDbV2Sink, OdbcSink, OracleSink, OrcSink, ParquetSink, RestSink, SalesforceServiceCloudSink, SalesforceSink, SapCloudForCustomerSink, SnowflakeSink, SqlDwSink, SqlMiSink, SqlServerSink, SqlSink. All required parameters must be populated in order to send to Azure. @@ -2055,6 +2090,9 @@ class CopySink(msrest.serialization.Model): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). 
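# ---------------------------------------------------------------------------
# Illustrative sketch, not part of the generated patch: the AvroDataset hunk
# above loosens avro_compression_codec from the AvroCompressionCodec enum
# (str) to a bare object, so a runtime expression now passes through as well
# as a literal codec name. All values and the import path (the public
# azure-mgmt-datafactory package at a matching API version) are assumptions.
from azure.mgmt.datafactory.models import (
    AvroDataset,
    AzureBlobStorageLocation,
    LinkedServiceReference,
)

dataset = AvroDataset(
    linked_service_name=LinkedServiceReference(
        type="LinkedServiceReference", reference_name="BlobStorageLS"  # hypothetical name
    ),
    location=AzureBlobStorageLocation(container="landing", folder_path="avro"),
    avro_compression_codec="@pipeline().parameters.codec",  # expression, legal now that the type is object
)
# A plain string such as "snappy" still works; it is simply no longer
# validated against the removed enum on the client side.
# ---------------------------------------------------------------------------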
+ :type disable_metrics_collection: object """ _validation = { @@ -2069,10 +2107,11 @@ class CopySink(msrest.serialization.Model): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, } _subtype_map = { - 'type': {'AvroSink': 'AvroSink', 'AzureBlobFSSink': 'AzureBlobFsSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'AzureDatabricksDeltaLakeSink': 'AzureDatabricksDeltaLakeSink', 'AzureMySqlSink': 'AzureMySqlSink', 'AzurePostgreSqlSink': 'AzurePostgreSqlSink', 'AzureQueueSink': 'AzureQueueSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureSqlSink': 'AzureSqlSink', 'AzureTableSink': 'AzureTableSink', 'BinarySink': 'BinarySink', 'BlobSink': 'BlobSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'CosmosDbSqlApiSink': 'CosmosDbSqlApiSink', 'DelimitedTextSink': 'DelimitedTextSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'FileSystemSink': 'FileSystemSink', 'InformixSink': 'InformixSink', 'JsonSink': 'JsonSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'OdbcSink': 'OdbcSink', 'OracleSink': 'OracleSink', 'OrcSink': 'OrcSink', 'ParquetSink': 'ParquetSink', 'RestSink': 'RestSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'SnowflakeSink': 'SnowflakeSink', 'SqlDWSink': 'SqlDwSink', 'SqlMISink': 'SqlMiSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink'} + 'type': {'AvroSink': 'AvroSink', 'AzureBlobFSSink': 'AzureBlobFsSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'AzureDatabricksDeltaLakeSink': 'AzureDatabricksDeltaLakeSink', 'AzureMySqlSink': 'AzureMySqlSink', 'AzurePostgreSqlSink': 'AzurePostgreSqlSink', 'AzureQueueSink': 'AzureQueueSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureSqlSink': 'AzureSqlSink', 'AzureTableSink': 'AzureTableSink', 'BinarySink': 'BinarySink', 'BlobSink': 'BlobSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'CosmosDbSqlApiSink': 'CosmosDbSqlApiSink', 'DelimitedTextSink': 'DelimitedTextSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'FileSystemSink': 'FileSystemSink', 'InformixSink': 'InformixSink', 'JsonSink': 'JsonSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'MongoDbAtlasSink': 'MongoDbAtlasSink', 'MongoDbV2Sink': 'MongoDbV2Sink', 'OdbcSink': 'OdbcSink', 'OracleSink': 'OracleSink', 'OrcSink': 'OrcSink', 'ParquetSink': 'ParquetSink', 'RestSink': 'RestSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'SnowflakeSink': 'SnowflakeSink', 'SqlDWSink': 'SqlDwSink', 'SqlMISink': 'SqlMiSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink'} } def __init__( @@ -2084,6 +2123,7 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + 
disable_metrics_collection: Optional[object] = None, **kwargs ): super(CopySink, self).__init__(**kwargs) @@ -2094,6 +2134,7 @@ def __init__( self.sink_retry_count = sink_retry_count self.sink_retry_wait = sink_retry_wait self.max_concurrent_connections = max_concurrent_connections + self.disable_metrics_collection = disable_metrics_collection class AvroSink(CopySink): @@ -2121,10 +2162,13 @@ class AvroSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param store_settings: Avro store settings. - :type store_settings: ~data_factory_management_client.models.StoreWriteSettings + :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings :param format_settings: Avro format settings. - :type format_settings: ~data_factory_management_client.models.AvroWriteSettings + :type format_settings: ~azure.mgmt.datafactory.models.AvroWriteSettings """ _validation = { @@ -2139,6 +2183,7 @@ class AvroSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, 'format_settings': {'key': 'formatSettings', 'type': 'AvroWriteSettings'}, } @@ -2152,11 +2197,12 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, store_settings: Optional["StoreWriteSettings"] = None, format_settings: Optional["AvroWriteSettings"] = None, **kwargs ): - super(AvroSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AvroSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AvroSink' # type: str self.store_settings = store_settings self.format_settings = format_settings @@ -2181,11 +2227,14 @@ class AvroSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param store_settings: Avro store settings. 
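# ---------------------------------------------------------------------------
# Illustrative sketch (assumed values): as the AvroSink/AvroSource hunks above
# show, disable_metrics_collection is threaded through CopySink/CopySource and
# every subclass forwards it to the base class via super().__init__. Locally:
from azure.mgmt.datafactory.models import AvroSink, AvroSource

source = AvroSource(max_concurrent_connections=4, disable_metrics_collection=True)
sink = AvroSink(write_batch_size=10000, disable_metrics_collection=True)

# msrest models serialize to the wire shape, so the new key is easy to verify:
assert source.serialize()["disableMetricsCollection"] is True
assert sink.serialize()["type"] == "AvroSink"
# ---------------------------------------------------------------------------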
- :type store_settings: ~data_factory_management_client.models.StoreReadSettings + :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object """ _validation = { @@ -2198,8 +2247,9 @@ class AvroSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -2209,11 +2259,12 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, store_settings: Optional["StoreReadSettings"] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, **kwargs ): - super(AvroSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AvroSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AvroSource' # type: str self.store_settings = store_settings self.additional_columns = additional_columns @@ -2387,18 +2438,18 @@ class AzureBatchLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param account_name: Required. The Azure Batch account name. Type: string (or Expression with resultType string). :type account_name: object :param access_key: The Azure Batch account access key. - :type access_key: ~data_factory_management_client.models.SecretBase + :type access_key: ~azure.mgmt.datafactory.models.SecretBase :param batch_uri: Required. The Azure Batch URI. Type: string (or Expression with resultType string). :type batch_uri: object @@ -2406,11 +2457,13 @@ class AzureBatchLinkedService(LinkedService): resultType string). 
:type pool_name: object :param linked_service_name: Required. The Azure Storage linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). :type encrypted_credential: object + :param credential: The credential reference containing authentication information. + :type credential: ~azure.mgmt.datafactory.models.CredentialReference """ _validation = { @@ -2434,6 +2487,7 @@ class AzureBatchLinkedService(LinkedService): 'pool_name': {'key': 'typeProperties.poolName', 'type': 'object'}, 'linked_service_name': {'key': 'typeProperties.linkedServiceName', 'type': 'LinkedServiceReference'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'}, } def __init__( @@ -2450,6 +2504,7 @@ def __init__( annotations: Optional[List[object]] = None, access_key: Optional["SecretBase"] = None, encrypted_credential: Optional[object] = None, + credential: Optional["CredentialReference"] = None, **kwargs ): super(AzureBatchLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -2460,6 +2515,7 @@ def __init__( self.pool_name = pool_name self.linked_service_name = linked_service_name self.encrypted_credential = encrypted_credential + self.credential = credential class AzureBlobDataset(Dataset): @@ -2481,14 +2537,14 @@ class AzureBlobDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param folder_path: The path of the Azure Blob storage. Type: string (or Expression with resultType string). :type folder_path: object @@ -2505,9 +2561,9 @@ class AzureBlobDataset(Dataset): Expression with resultType string). :type modified_datetime_end: object :param format: The format of the Azure Blob storage. - :type format: ~data_factory_management_client.models.DatasetStorageFormat + :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat :param compression: The data compression method used for the blob storage. 
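# ---------------------------------------------------------------------------
# Illustrative sketch (hypothetical names): AzureBatchLinkedService — like the
# other linked services touched in this patch — gains an optional `credential`
# carrying a CredentialReference. Assuming the reference takes its constant
# type string plus a reference_name, wiring it up could look like:
from azure.mgmt.datafactory.models import (
    AzureBatchLinkedService,
    CredentialReference,
    LinkedServiceReference,
)

linked_service = AzureBatchLinkedService(
    account_name="examplebatch",
    batch_uri="https://examplebatch.westus2.batch.azure.com",
    pool_name="examplepool",
    linked_service_name=LinkedServiceReference(
        type="LinkedServiceReference", reference_name="AzureStorageLS"
    ),
    credential=CredentialReference(
        type="CredentialReference", reference_name="exampleUamiCredential"
    ),
)
# ---------------------------------------------------------------------------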
- :type compression: ~data_factory_management_client.models.DatasetCompression + :type compression: ~azure.mgmt.datafactory.models.DatasetCompression """ _validation = { @@ -2584,14 +2640,14 @@ class AzureBlobFsDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param folder_path: The path of the Azure Data Lake Storage Gen2 storage. Type: string (or Expression with resultType string). :type folder_path: object @@ -2599,9 +2655,9 @@ class AzureBlobFsDataset(Dataset): with resultType string). :type file_name: object :param format: The format of the Azure Data Lake Storage Gen2 storage. - :type format: ~data_factory_management_client.models.DatasetStorageFormat + :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat :param compression: The data compression method used for the blob storage. - :type compression: ~data_factory_management_client.models.DatasetCompression + :type compression: ~azure.mgmt.datafactory.models.DatasetCompression """ _validation = { @@ -2661,11 +2717,11 @@ class AzureBlobFsLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param url: Required. Endpoint for the Azure Data Lake Storage Gen2 service. Type: string (or @@ -2679,7 +2735,7 @@ class AzureBlobFsLinkedService(LinkedService): :type service_principal_id: object :param service_principal_key: The Key of the application used to authenticate against the Azure Data Lake Storage Gen2 account. - :type service_principal_key: ~data_factory_management_client.models.SecretBase + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :param tenant: The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). :type tenant: object @@ -2691,6 +2747,8 @@ class AzureBlobFsLinkedService(LinkedService): encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
:type encrypted_credential: object + :param credential: The credential reference containing authentication information. + :type credential: ~azure.mgmt.datafactory.models.CredentialReference """ _validation = { @@ -2712,6 +2770,7 @@ class AzureBlobFsLinkedService(LinkedService): 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'}, } def __init__( @@ -2729,6 +2788,7 @@ def __init__( tenant: Optional[object] = None, azure_cloud_type: Optional[object] = None, encrypted_credential: Optional[object] = None, + credential: Optional["CredentialReference"] = None, **kwargs ): super(AzureBlobFsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -2740,6 +2800,7 @@ def __init__( self.tenant = tenant self.azure_cloud_type = azure_cloud_type self.encrypted_credential = encrypted_credential + self.credential = credential class AzureBlobFsLocation(DatasetLocation): @@ -2802,6 +2863,9 @@ class AzureBlobFsReadSettings(StoreReadSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). :type recursive: object @@ -2839,6 +2903,7 @@ class AzureBlobFsReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -2855,6 +2920,7 @@ def __init__( *, additional_properties: Optional[Dict[str, object]] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, recursive: Optional[object] = None, wildcard_folder_path: Optional[object] = None, wildcard_file_name: Optional[object] = None, @@ -2866,7 +2932,7 @@ def __init__( modified_datetime_end: Optional[object] = None, **kwargs ): - super(AzureBlobFsReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AzureBlobFsReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AzureBlobFSReadSettings' # type: str self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path @@ -2904,8 +2970,14 @@ class AzureBlobFsSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. 
Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param copy_behavior: The type of copy behavior for copy sink. :type copy_behavior: object + :param metadata: Specify the custom metadata to be added to sink data. Type: array of objects + (or Expression with resultType array of objects). + :type metadata: list[~azure.mgmt.datafactory.models.MetadataItem] """ _validation = { @@ -2920,7 +2992,9 @@ class AzureBlobFsSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + 'metadata': {'key': 'metadata', 'type': '[MetadataItem]'}, } def __init__( @@ -2932,12 +3006,15 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, copy_behavior: Optional[object] = None, + metadata: Optional[List["MetadataItem"]] = None, **kwargs ): - super(AzureBlobFsSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AzureBlobFsSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AzureBlobFSSink' # type: str self.copy_behavior = copy_behavior + self.metadata = metadata class AzureBlobFsSource(CopySource): @@ -2959,6 +3036,9 @@ class AzureBlobFsSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param treat_empty_as_null: Treat empty as null. Type: boolean (or Expression with resultType boolean). 
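# ---------------------------------------------------------------------------
# Illustrative sketch (hypothetical values): the Data Lake Gen2 sink hunk above
# adds a `metadata` list of MetadataItem(name, value) pairs stamped onto the
# written data; name and value are typed object, so expressions pass through.
# The public SDK spells the class AzureBlobFSSink (this vendored copy uses
# AzureBlobFsSink).
from azure.mgmt.datafactory.models import AzureBlobFSSink, MetadataItem

fs_sink = AzureBlobFSSink(
    copy_behavior="PreserveHierarchy",
    disable_metrics_collection=False,
    metadata=[
        MetadataItem(name="ingestedBy", value="adf-copy"),
        MetadataItem(name="runId", value="@pipeline().RunId"),
    ],
)
# ---------------------------------------------------------------------------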
:type treat_empty_as_null: object @@ -2980,6 +3060,7 @@ class AzureBlobFsSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'}, 'skip_header_line_count': {'key': 'skipHeaderLineCount', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, @@ -2992,12 +3073,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, treat_empty_as_null: Optional[object] = None, skip_header_line_count: Optional[object] = None, recursive: Optional[object] = None, **kwargs ): - super(AzureBlobFsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AzureBlobFsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AzureBlobFSSource' # type: str self.treat_empty_as_null = treat_empty_as_null self.skip_header_line_count = skip_header_line_count @@ -3020,6 +3102,9 @@ class StoreWriteSettings(msrest.serialization.Model): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param copy_behavior: The type of copy behavior for copy sink. :type copy_behavior: object """ @@ -3032,6 +3117,7 @@ class StoreWriteSettings(msrest.serialization.Model): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, } @@ -3044,6 +3130,7 @@ def __init__( *, additional_properties: Optional[Dict[str, object]] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, copy_behavior: Optional[object] = None, **kwargs ): @@ -3051,6 +3138,7 @@ def __init__( self.additional_properties = additional_properties self.type = 'StoreWriteSettings' # type: str self.max_concurrent_connections = max_concurrent_connections + self.disable_metrics_collection = disable_metrics_collection self.copy_behavior = copy_behavior @@ -3067,6 +3155,9 @@ class AzureBlobFsWriteSettings(StoreWriteSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. 
Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param copy_behavior: The type of copy behavior for copy sink. :type copy_behavior: object :param block_size_in_mb: Indicates the block size(MB) when writing data to blob. Type: integer @@ -3082,6 +3173,7 @@ class AzureBlobFsWriteSettings(StoreWriteSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, 'block_size_in_mb': {'key': 'blockSizeInMB', 'type': 'object'}, } @@ -3091,11 +3183,12 @@ def __init__( *, additional_properties: Optional[Dict[str, object]] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, copy_behavior: Optional[object] = None, block_size_in_mb: Optional[object] = None, **kwargs ): - super(AzureBlobFsWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) + super(AzureBlobFsWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, copy_behavior=copy_behavior, **kwargs) self.type = 'AzureBlobFSWriteSettings' # type: str self.block_size_in_mb = block_size_in_mb @@ -3111,24 +3204,24 @@ class AzureBlobStorageLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param connection_string: The connection string. It is mutually exclusive with sasUri, serviceEndpoint property. Type: string, SecureString or AzureKeyVaultSecretReference. :type connection_string: object :param account_key: The Azure key vault secret reference of accountKey in connection string. - :type account_key: ~data_factory_management_client.models.AzureKeyVaultSecretReference + :type account_key: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param sas_uri: SAS URI of the Azure Blob Storage resource. It is mutually exclusive with connectionString, serviceEndpoint property. Type: string, SecureString or AzureKeyVaultSecretReference. :type sas_uri: object :param sas_token: The Azure key vault secret reference of sasToken in sas uri. - :type sas_token: ~data_factory_management_client.models.AzureKeyVaultSecretReference + :type sas_token: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param service_endpoint: Blob service endpoint of the Azure Blob Storage resource. It is mutually exclusive with connectionString, sasUri property. 
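# ---------------------------------------------------------------------------
# Illustrative sketch (assumed values): StoreWriteSettings gains the same flag,
# and subclasses such as the Gen2 write settings above forward it alongside
# their own properties (AzureBlobFSWriteSettings in the public SDK's spelling):
from azure.mgmt.datafactory.models import AzureBlobFSWriteSettings

write_settings = AzureBlobFSWriteSettings(
    copy_behavior="FlattenHierarchy",
    block_size_in_mb=8,
    disable_metrics_collection=True,
)
# ---------------------------------------------------------------------------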
:type service_endpoint: str @@ -3137,7 +3230,7 @@ class AzureBlobStorageLinkedService(LinkedService): :type service_principal_id: object :param service_principal_key: The key of the service principal used to authenticate against Azure SQL Data Warehouse. - :type service_principal_key: ~data_factory_management_client.models.SecretBase + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :param tenant: The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). :type tenant: object @@ -3153,6 +3246,8 @@ class AzureBlobStorageLinkedService(LinkedService): encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). :type encrypted_credential: str + :param credential: The credential reference containing authentication information. + :type credential: ~azure.mgmt.datafactory.models.CredentialReference """ _validation = { @@ -3177,6 +3272,7 @@ class AzureBlobStorageLinkedService(LinkedService): 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, 'account_kind': {'key': 'typeProperties.accountKind', 'type': 'str'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, + 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'}, } def __init__( @@ -3198,6 +3294,7 @@ def __init__( azure_cloud_type: Optional[object] = None, account_kind: Optional[str] = None, encrypted_credential: Optional[str] = None, + credential: Optional["CredentialReference"] = None, **kwargs ): super(AzureBlobStorageLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -3213,6 +3310,7 @@ def __init__( self.azure_cloud_type = azure_cloud_type self.account_kind = account_kind self.encrypted_credential = encrypted_credential + self.credential = credential class AzureBlobStorageLocation(DatasetLocation): @@ -3275,6 +3373,9 @@ class AzureBlobStorageReadSettings(StoreReadSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). 
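# ---------------------------------------------------------------------------
# Illustrative sketch (hypothetical paths): StoreReadSettings subclasses take
# the flag too, next to the existing wildcard and modified-datetime filters,
# as in the AzureBlobStorageReadSettings docstring above (its attribute map
# and __init__ continue below):
from azure.mgmt.datafactory.models import AzureBlobStorageReadSettings

read_settings = AzureBlobStorageReadSettings(
    recursive=True,
    wildcard_folder_path="raw/2021/*",
    wildcard_file_name="*.avro",
    disable_metrics_collection=True,
)
# ---------------------------------------------------------------------------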
:type recursive: object @@ -3315,6 +3416,7 @@ class AzureBlobStorageReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -3332,6 +3434,7 @@ def __init__( *, additional_properties: Optional[Dict[str, object]] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, recursive: Optional[object] = None, wildcard_folder_path: Optional[object] = None, wildcard_file_name: Optional[object] = None, @@ -3344,7 +3447,7 @@ def __init__( modified_datetime_end: Optional[object] = None, **kwargs ): - super(AzureBlobStorageReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AzureBlobStorageReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AzureBlobStorageReadSettings' # type: str self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path @@ -3371,6 +3474,9 @@ class AzureBlobStorageWriteSettings(StoreWriteSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param copy_behavior: The type of copy behavior for copy sink. :type copy_behavior: object :param block_size_in_mb: Indicates the block size(MB) when writing data to blob. 
Type: integer @@ -3386,6 +3492,7 @@ class AzureBlobStorageWriteSettings(StoreWriteSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, 'block_size_in_mb': {'key': 'blockSizeInMB', 'type': 'object'}, } @@ -3395,11 +3502,12 @@ def __init__( *, additional_properties: Optional[Dict[str, object]] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, copy_behavior: Optional[object] = None, block_size_in_mb: Optional[object] = None, **kwargs ): - super(AzureBlobStorageWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) + super(AzureBlobStorageWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, copy_behavior=copy_behavior, **kwargs) self.type = 'AzureBlobStorageWriteSettings' # type: str self.block_size_in_mb = block_size_in_mb @@ -3423,14 +3531,14 @@ class AzureDatabricksDeltaLakeDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table: The name of delta table. Type: string (or Expression with resultType string). :type table: object :param database: The database name of delta table. Type: string (or Expression with resultType @@ -3653,11 +3761,11 @@ class AzureDatabricksDeltaLakeLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param domain: Required. :code:``.azuredatabricks.net, domain name of your Databricks @@ -3666,7 +3774,7 @@ class AzureDatabricksDeltaLakeLinkedService(LinkedService): :param access_token: Access token for databricks REST API. 
Refer to https://docs.azuredatabricks.net/api/latest/authentication.html. Type: string, SecureString or AzureKeyVaultSecretReference. - :type access_token: ~data_factory_management_client.models.SecretBase + :type access_token: ~azure.mgmt.datafactory.models.SecretBase :param cluster_id: The id of an existing interactive cluster that will be used for all runs of this job. Type: string (or Expression with resultType string). :type cluster_id: object @@ -3741,12 +3849,14 @@ class AzureDatabricksDeltaLakeSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType string). :type pre_copy_script: object :param import_settings: Azure Databricks Delta Lake import settings. - :type import_settings: - ~data_factory_management_client.models.AzureDatabricksDeltaLakeImportCommand + :type import_settings: ~azure.mgmt.datafactory.models.AzureDatabricksDeltaLakeImportCommand """ _validation = { @@ -3761,6 +3871,7 @@ class AzureDatabricksDeltaLakeSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, 'import_settings': {'key': 'importSettings', 'type': 'AzureDatabricksDeltaLakeImportCommand'}, } @@ -3774,11 +3885,12 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, pre_copy_script: Optional[object] = None, import_settings: Optional["AzureDatabricksDeltaLakeImportCommand"] = None, **kwargs ): - super(AzureDatabricksDeltaLakeSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AzureDatabricksDeltaLakeSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AzureDatabricksDeltaLakeSink' # type: str self.pre_copy_script = pre_copy_script self.import_settings = import_settings @@ -3803,12 +3915,14 @@ class AzureDatabricksDeltaLakeSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query: Azure Databricks Delta Lake Sql query. 
Type: string (or Expression with resultType string). :type query: object :param export_settings: Azure Databricks Delta Lake export settings. - :type export_settings: - ~data_factory_management_client.models.AzureDatabricksDeltaLakeExportCommand + :type export_settings: ~azure.mgmt.datafactory.models.AzureDatabricksDeltaLakeExportCommand """ _validation = { @@ -3821,6 +3935,7 @@ class AzureDatabricksDeltaLakeSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'export_settings': {'key': 'exportSettings', 'type': 'AzureDatabricksDeltaLakeExportCommand'}, } @@ -3832,11 +3947,12 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query: Optional[object] = None, export_settings: Optional["AzureDatabricksDeltaLakeExportCommand"] = None, **kwargs ): - super(AzureDatabricksDeltaLakeSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AzureDatabricksDeltaLakeSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AzureDatabricksDeltaLakeSource' # type: str self.query = query self.export_settings = export_settings @@ -3853,11 +3969,11 @@ class AzureDatabricksLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param domain: Required. :code:``.azuredatabricks.net, domain name of your Databricks @@ -3866,7 +3982,7 @@ class AzureDatabricksLinkedService(LinkedService): :param access_token: Access token for databricks REST API. Refer to https://docs.azuredatabricks.net/api/latest/authentication.html. Type: string (or Expression with resultType string). - :type access_token: ~data_factory_management_client.models.SecretBase + :type access_token: ~azure.mgmt.datafactory.models.SecretBase :param authentication: Required to specify MSI, if using Workspace resource id for databricks REST API. Type: string (or Expression with resultType string). :type authentication: object @@ -3925,6 +4041,8 @@ class AzureDatabricksLinkedService(LinkedService): :param policy_id: The policy id for limiting the ability to configure clusters based on a user defined set of rules. 
Type: string (or Expression with resultType string). :type policy_id: object + :param credential: The credential reference containing authentication information. + :type credential: ~azure.mgmt.datafactory.models.CredentialReference """ _validation = { @@ -3957,6 +4075,7 @@ class AzureDatabricksLinkedService(LinkedService): 'new_cluster_enable_elastic_disk': {'key': 'typeProperties.newClusterEnableElasticDisk', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, 'policy_id': {'key': 'typeProperties.policyId', 'type': 'object'}, + 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'}, } def __init__( @@ -3985,6 +4104,7 @@ def __init__( new_cluster_enable_elastic_disk: Optional[object] = None, encrypted_credential: Optional[object] = None, policy_id: Optional[object] = None, + credential: Optional["CredentialReference"] = None, **kwargs ): super(AzureDatabricksLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -4007,6 +4127,7 @@ def __init__( self.new_cluster_enable_elastic_disk = new_cluster_enable_elastic_disk self.encrypted_credential = encrypted_credential self.policy_id = policy_id + self.credential = credential class ExecutionActivity(Activity): @@ -4027,13 +4148,13 @@ class ExecutionActivity(Activity): :param description: Activity description. :type description: str :param depends_on: Activity depends on condition. - :type depends_on: list[~data_factory_management_client.models.ActivityDependency] + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. - :type user_properties: list[~data_factory_management_client.models.UserProperty] + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] :param linked_service_name: Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param policy: Activity policy. - :type policy: ~data_factory_management_client.models.ActivityPolicy + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy """ _validation = { @@ -4089,13 +4210,13 @@ class AzureDataExplorerCommandActivity(ExecutionActivity): :param description: Activity description. :type description: str :param depends_on: Activity depends on condition. - :type depends_on: list[~data_factory_management_client.models.ActivityDependency] + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. - :type user_properties: list[~data_factory_management_client.models.UserProperty] + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] :param linked_service_name: Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param policy: Activity policy. - :type policy: ~data_factory_management_client.models.ActivityPolicy + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy :param command: Required. A control command, according to the Azure Data Explorer command syntax. Type: string (or Expression with resultType string). 
:type command: object @@ -4154,11 +4275,11 @@ class AzureDataExplorerLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param endpoint: Required. The endpoint of Azure Data Explorer (the engine's endpoint). URL @@ -4170,13 +4291,15 @@ class AzureDataExplorerLinkedService(LinkedService): :type service_principal_id: object :param service_principal_key: The key of the service principal used to authenticate against Kusto. - :type service_principal_key: ~data_factory_management_client.models.SecretBase + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :param database: Required. Database name for connection. Type: string (or Expression with resultType string). :type database: object :param tenant: The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). :type tenant: object + :param credential: The credential reference containing authentication information. + :type credential: ~azure.mgmt.datafactory.models.CredentialReference """ _validation = { @@ -4197,6 +4320,7 @@ class AzureDataExplorerLinkedService(LinkedService): 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, 'database': {'key': 'typeProperties.database', 'type': 'object'}, 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'}, } def __init__( @@ -4212,6 +4336,7 @@ def __init__( service_principal_id: Optional[object] = None, service_principal_key: Optional["SecretBase"] = None, tenant: Optional[object] = None, + credential: Optional["CredentialReference"] = None, **kwargs ): super(AzureDataExplorerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -4221,6 +4346,7 @@ def __init__( self.service_principal_key = service_principal_key self.database = database self.tenant = tenant + self.credential = credential class AzureDataExplorerSink(CopySink): @@ -4248,6 +4374,9 @@ class AzureDataExplorerSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param ingestion_mapping_name: A name of a pre-created csv mapping that was defined on the target Kusto table. Type: string. 
:type ingestion_mapping_name: object @@ -4271,6 +4400,7 @@ class AzureDataExplorerSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'ingestion_mapping_name': {'key': 'ingestionMappingName', 'type': 'object'}, 'ingestion_mapping_as_json': {'key': 'ingestionMappingAsJson', 'type': 'object'}, 'flush_immediately': {'key': 'flushImmediately', 'type': 'object'}, @@ -4285,12 +4415,13 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, ingestion_mapping_name: Optional[object] = None, ingestion_mapping_as_json: Optional[object] = None, flush_immediately: Optional[object] = None, **kwargs ): - super(AzureDataExplorerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AzureDataExplorerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AzureDataExplorerSink' # type: str self.ingestion_mapping_name = ingestion_mapping_name self.ingestion_mapping_as_json = ingestion_mapping_as_json @@ -4316,6 +4447,9 @@ class AzureDataExplorerSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query: Required. Database query. Should be a Kusto Query Language (KQL) query. Type: string (or Expression with resultType string). :type query: object @@ -4326,8 +4460,8 @@ class AzureDataExplorerSource(CopySource): pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).. :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
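# ---------------------------------------------------------------------------
# Illustrative sketch (hypothetical query): additional_columns is retyped above
# from list[AdditionalColumns] to a bare object, so a literal list of
# name/value mappings and a whole expression string both serialize as-is:
from azure.mgmt.datafactory.models import AzureDataExplorerSource

adx_source = AzureDataExplorerSource(
    query="MyTable | take 10",
    additional_columns=[{"name": "sourceFile", "value": "@item().name"}],
    disable_metrics_collection=True,
)
# ---------------------------------------------------------------------------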
+ :type additional_columns: object """ _validation = { @@ -4341,10 +4475,11 @@ class AzureDataExplorerSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'no_truncation': {'key': 'noTruncation', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -4355,12 +4490,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, no_truncation: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, **kwargs ): - super(AzureDataExplorerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AzureDataExplorerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AzureDataExplorerSource' # type: str self.query = query self.no_truncation = no_truncation @@ -4387,14 +4523,14 @@ class AzureDataExplorerTableDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table: The table name of the Azure Data Explorer database. Type: string (or Expression with resultType string). :type table: object @@ -4448,11 +4584,11 @@ class AzureDataLakeAnalyticsLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. 
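
# --- Editor's illustrative sketch, not part of this diff. Because
# additionalColumns is loosened above from list[AdditionalColumns] to `object`,
# callers now pass plain dicts (or a full ADF expression) instead of
# AdditionalColumns model instances. The column name/value are hypothetical.
from azure.mgmt.datafactory.models import AzureDataExplorerSource

source = AzureDataExplorerSource(
    query="StormEvents | take 10",
    additional_columns=[{"name": "ingestedBy", "value": "adf"}],
)
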
- :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param account_name: Required. The Azure Data Lake Analytics account name. Type: string (or @@ -4463,7 +4599,7 @@ class AzureDataLakeAnalyticsLinkedService(LinkedService): :type service_principal_id: object :param service_principal_key: The Key of the application used to authenticate against the Azure Data Lake Analytics account. - :type service_principal_key: ~data_factory_management_client.models.SecretBase + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :param tenant: Required. The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). :type tenant: object @@ -4554,14 +4690,14 @@ class AzureDataLakeStoreDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param folder_path: Path to the folder in the Azure Data Lake Store. Type: string (or Expression with resultType string). :type folder_path: object @@ -4569,10 +4705,10 @@ class AzureDataLakeStoreDataset(Dataset): Expression with resultType string). :type file_name: object :param format: The format of the Data Lake Store. - :type format: ~data_factory_management_client.models.DatasetStorageFormat + :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat :param compression: The data compression method used for the item(s) in the Azure Data Lake Store. - :type compression: ~data_factory_management_client.models.DatasetCompression + :type compression: ~azure.mgmt.datafactory.models.DatasetCompression """ _validation = { @@ -4632,11 +4768,11 @@ class AzureDataLakeStoreLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param data_lake_store_uri: Required. Data Lake Store service URI. 
Type: string (or Expression @@ -4647,7 +4783,7 @@ class AzureDataLakeStoreLinkedService(LinkedService): :type service_principal_id: object :param service_principal_key: The Key of the application used to authenticate against the Azure Data Lake Store account. - :type service_principal_key: ~data_factory_management_client.models.SecretBase + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :param tenant: The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). :type tenant: object @@ -4668,6 +4804,8 @@ class AzureDataLakeStoreLinkedService(LinkedService): encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). :type encrypted_credential: object + :param credential: The credential reference containing authentication information. + :type credential: ~azure.mgmt.datafactory.models.CredentialReference """ _validation = { @@ -4691,6 +4829,7 @@ class AzureDataLakeStoreLinkedService(LinkedService): 'subscription_id': {'key': 'typeProperties.subscriptionId', 'type': 'object'}, 'resource_group_name': {'key': 'typeProperties.resourceGroupName', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'}, } def __init__( @@ -4710,6 +4849,7 @@ def __init__( subscription_id: Optional[object] = None, resource_group_name: Optional[object] = None, encrypted_credential: Optional[object] = None, + credential: Optional["CredentialReference"] = None, **kwargs ): super(AzureDataLakeStoreLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -4723,6 +4863,7 @@ def __init__( self.subscription_id = subscription_id self.resource_group_name = resource_group_name self.encrypted_credential = encrypted_credential + self.credential = credential class AzureDataLakeStoreLocation(DatasetLocation): @@ -4779,6 +4920,9 @@ class AzureDataLakeStoreReadSettings(StoreReadSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). 
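
# --- Editor's illustrative sketch, not part of this diff. The ADLS Gen1 linked
# service above can now carry a credential reference as an alternative to the
# service principal properties. The account and credential names are
# hypothetical, and CredentialReference(reference_name=...) is assumed.
from azure.mgmt.datafactory.models import (
    AzureDataLakeStoreLinkedService,
    CredentialReference,
)

adls_ls = AzureDataLakeStoreLinkedService(
    data_lake_store_uri="adl://exampleaccount.azuredatalakestore.net/webhdfs/v1",
    credential=CredentialReference(reference_name="exampleManagedIdentity"),
)
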
:type recursive: object @@ -4824,6 +4968,7 @@ class AzureDataLakeStoreReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -4842,6 +4987,7 @@ def __init__( *, additional_properties: Optional[Dict[str, object]] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, recursive: Optional[object] = None, wildcard_folder_path: Optional[object] = None, wildcard_file_name: Optional[object] = None, @@ -4855,7 +5001,7 @@ def __init__( modified_datetime_end: Optional[object] = None, **kwargs ): - super(AzureDataLakeStoreReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AzureDataLakeStoreReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AzureDataLakeStoreReadSettings' # type: str self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path @@ -4895,6 +5041,9 @@ class AzureDataLakeStoreSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param copy_behavior: The type of copy behavior for copy sink. :type copy_behavior: object :param enable_adls_single_file_parallel: Single File Parallel. 
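
# --- Editor's illustrative sketch, not part of this diff. Each
# StoreReadSettings subclass in this change forwards disable_metrics_collection
# to the base class, as AzureDataLakeStoreReadSettings does above; the wildcard
# value below is hypothetical.
from azure.mgmt.datafactory.models import AzureDataLakeStoreReadSettings

read_settings = AzureDataLakeStoreReadSettings(
    recursive=True,
    wildcard_file_name="*.csv",
    disable_metrics_collection=False,
)
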
@@ -4913,6 +5062,7 @@ class AzureDataLakeStoreSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, 'enable_adls_single_file_parallel': {'key': 'enableAdlsSingleFileParallel', 'type': 'object'}, } @@ -4926,11 +5076,12 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, copy_behavior: Optional[object] = None, enable_adls_single_file_parallel: Optional[object] = None, **kwargs ): - super(AzureDataLakeStoreSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AzureDataLakeStoreSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AzureDataLakeStoreSink' # type: str self.copy_behavior = copy_behavior self.enable_adls_single_file_parallel = enable_adls_single_file_parallel @@ -4955,6 +5106,9 @@ class AzureDataLakeStoreSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). 
:type recursive: object @@ -4970,6 +5124,7 @@ class AzureDataLakeStoreSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, } @@ -4980,10 +5135,11 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, recursive: Optional[object] = None, **kwargs ): - super(AzureDataLakeStoreSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AzureDataLakeStoreSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AzureDataLakeStoreSource' # type: str self.recursive = recursive @@ -5001,6 +5157,9 @@ class AzureDataLakeStoreWriteSettings(StoreWriteSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param copy_behavior: The type of copy behavior for copy sink. :type copy_behavior: object :param expiry_date_time: Specifies the expiry time of the written files. The time is applied to @@ -5017,6 +5176,7 @@ class AzureDataLakeStoreWriteSettings(StoreWriteSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, 'expiry_date_time': {'key': 'expiryDateTime', 'type': 'object'}, } @@ -5026,11 +5186,12 @@ def __init__( *, additional_properties: Optional[Dict[str, object]] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, copy_behavior: Optional[object] = None, expiry_date_time: Optional[object] = None, **kwargs ): - super(AzureDataLakeStoreWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) + super(AzureDataLakeStoreWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, copy_behavior=copy_behavior, **kwargs) self.type = 'AzureDataLakeStoreWriteSettings' # type: str self.expiry_date_time = expiry_date_time @@ -5046,11 +5207,11 @@ class AzureFileStorageLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. 
- :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param host: Host name of the server. Type: string (or Expression with resultType string). @@ -5059,17 +5220,17 @@ class AzureFileStorageLinkedService(LinkedService): string). :type user_id: object :param password: Password to log on to the server. - :type password: ~data_factory_management_client.models.SecretBase + :type password: ~azure.mgmt.datafactory.models.SecretBase :param connection_string: The connection string. It is mutually exclusive with the sasUri property. Type: string, SecureString or AzureKeyVaultSecretReference. :type connection_string: object :param account_key: The Azure key vault secret reference of accountKey in connection string. - :type account_key: ~data_factory_management_client.models.AzureKeyVaultSecretReference + :type account_key: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param sas_uri: SAS URI of the Azure File resource. It is mutually exclusive with the connectionString property. Type: string, SecureString or AzureKeyVaultSecretReference. :type sas_uri: object :param sas_token: The Azure key vault secret reference of sasToken in the SAS URI. - :type sas_token: ~data_factory_management_client.models.AzureKeyVaultSecretReference + :type sas_token: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param file_share: The Azure file share name. It is required when authenticating with accountKey/sasToken. Type: string (or Expression with resultType string). :type file_share: object @@ -5193,6 +5354,9 @@ class AzureFileStorageReadSettings(StoreReadSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean).
:type recursive: object @@ -5233,6 +5397,7 @@ class AzureFileStorageReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -5250,6 +5415,7 @@ def __init__( *, additional_properties: Optional[Dict[str, object]] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, recursive: Optional[object] = None, wildcard_folder_path: Optional[object] = None, wildcard_file_name: Optional[object] = None, @@ -5262,7 +5428,7 @@ def __init__( modified_datetime_end: Optional[object] = None, **kwargs ): - super(AzureFileStorageReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AzureFileStorageReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AzureFileStorageReadSettings' # type: str self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path @@ -5289,6 +5455,9 @@ class AzureFileStorageWriteSettings(StoreWriteSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param copy_behavior: The type of copy behavior for copy sink. :type copy_behavior: object """ @@ -5301,6 +5470,7 @@ class AzureFileStorageWriteSettings(StoreWriteSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, } @@ -5309,10 +5479,11 @@ def __init__( *, additional_properties: Optional[Dict[str, object]] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, copy_behavior: Optional[object] = None, **kwargs ): - super(AzureFileStorageWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) + super(AzureFileStorageWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, copy_behavior=copy_behavior, **kwargs) self.type = 'AzureFileStorageWriteSettings' # type: str @@ -5331,16 +5502,16 @@ class AzureFunctionActivity(ExecutionActivity): :param description: Activity description. :type description: str :param depends_on: Activity depends on condition. 
- :type depends_on: list[~data_factory_management_client.models.ActivityDependency] + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. - :type user_properties: list[~data_factory_management_client.models.UserProperty] + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] :param linked_service_name: Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param policy: Activity policy. - :type policy: ~data_factory_management_client.models.ActivityPolicy + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy :param method: Required. REST API method for target endpoint. Possible values include: "GET", "POST", "PUT", "DELETE", "OPTIONS", "HEAD", "TRACE". - :type method: str or ~data_factory_management_client.models.AzureFunctionActivityMethod + :type method: str or ~azure.mgmt.datafactory.models.AzureFunctionActivityMethod :param function_name: Required. Name of the Function that the Azure Function Activity will call. Type: string (or Expression with resultType string). :type function_name: object @@ -5410,22 +5581,29 @@ class AzureFunctionLinkedService(LinkedService): :param type: Required. Type of linked service. Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param function_app_url: Required. The endpoint of the Azure Function App. URL will be in the format https://<accountName>.azurewebsites.net. :type function_app_url: object :param function_key: Function or Host key for Azure Function App. - :type function_key: ~data_factory_management_client.models.SecretBase + :type function_key: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). :type encrypted_credential: object + :param credential: The credential reference containing authentication information. + :type credential: ~azure.mgmt.datafactory.models.CredentialReference + :param resource_id: Allowed token audiences for the Azure Function. + :type resource_id: object + :param authentication: Type of authentication (required to specify MSI) used to connect to + AzureFunction. Type: string (or Expression with resultType string).
+ :type authentication: object """ _validation = { @@ -5443,6 +5621,9 @@ class AzureFunctionLinkedService(LinkedService): 'function_app_url': {'key': 'typeProperties.functionAppUrl', 'type': 'object'}, 'function_key': {'key': 'typeProperties.functionKey', 'type': 'SecretBase'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'}, + 'resource_id': {'key': 'typeProperties.resourceId', 'type': 'object'}, + 'authentication': {'key': 'typeProperties.authentication', 'type': 'object'}, } def __init__( @@ -5456,6 +5637,9 @@ def __init__( annotations: Optional[List[object]] = None, function_key: Optional["SecretBase"] = None, encrypted_credential: Optional[object] = None, + credential: Optional["CredentialReference"] = None, + resource_id: Optional[object] = None, + authentication: Optional[object] = None, **kwargs ): super(AzureFunctionLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -5463,6 +5647,9 @@ def __init__( self.function_app_url = function_app_url self.function_key = function_key self.encrypted_credential = encrypted_credential + self.credential = credential + self.resource_id = resource_id + self.authentication = authentication class AzureKeyVaultLinkedService(LinkedService): @@ -5476,16 +5663,18 @@ class AzureKeyVaultLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param base_url: Required. The base URL of the Azure Key Vault. e.g. https://myakv.vault.azure.net Type: string (or Expression with resultType string). :type base_url: object + :param credential: The credential reference containing authentication information. 
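
# --- Editor's illustrative sketch, not part of this diff. The three properties
# added to AzureFunctionLinkedService above enable MSI authentication; the app
# URL is hypothetical and, per the docstring, resource_id carries the token
# audience requested for the function app.
from azure.mgmt.datafactory.models import AzureFunctionLinkedService

func_ls = AzureFunctionLinkedService(
    function_app_url="https://exampleapp.azurewebsites.net",
    authentication="MSI",
    resource_id="https://exampleapp.azurewebsites.net",
)
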
+ :type credential: ~azure.mgmt.datafactory.models.CredentialReference """ _validation = { @@ -5501,6 +5690,7 @@ class AzureKeyVaultLinkedService(LinkedService): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'base_url': {'key': 'typeProperties.baseUrl', 'type': 'object'}, + 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'}, } def __init__( @@ -5512,11 +5702,13 @@ def __init__( description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, + credential: Optional["CredentialReference"] = None, **kwargs ): super(AzureKeyVaultLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'AzureKeyVault' # type: str self.base_url = base_url + self.credential = credential class SecretBase(msrest.serialization.Model): @@ -5559,7 +5751,7 @@ class AzureKeyVaultSecretReference(SecretBase): :param type: Required. Type of the secret.Constant filled by server. :type type: str :param store: Required. The Azure Key Vault linked service reference. - :type store: ~data_factory_management_client.models.LinkedServiceReference + :type store: ~azure.mgmt.datafactory.models.LinkedServiceReference :param secret_name: Required. The name of the secret in Azure Key Vault. Type: string (or Expression with resultType string). :type secret_name: object @@ -5607,18 +5799,18 @@ class AzureMariaDbLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param connection_string: An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. :type connection_string: object :param pwd: The Azure key vault secret reference of password in connection string. - :type pwd: ~data_factory_management_client.models.AzureKeyVaultSecretReference + :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -5680,12 +5872,15 @@ class AzureMariaDbSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. 
Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -5701,8 +5896,9 @@ class AzureMariaDbSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -5713,12 +5909,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, query: Optional[object] = None, **kwargs ): - super(AzureMariaDbSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(AzureMariaDbSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'AzureMariaDBSource' # type: str self.query = query @@ -5742,14 +5939,14 @@ class AzureMariaDbTableDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The table name. Type: string (or Expression with resultType string). 
:type table_name: object """ @@ -5806,13 +6003,13 @@ class AzureMlBatchExecutionActivity(ExecutionActivity): :param description: Activity description. :type description: str :param depends_on: Activity depends on condition. - :type depends_on: list[~data_factory_management_client.models.ActivityDependency] + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. - :type user_properties: list[~data_factory_management_client.models.UserProperty] + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] :param linked_service_name: Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param policy: Activity policy. - :type policy: ~data_factory_management_client.models.ActivityPolicy + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy :param global_parameters: Key,Value pairs to be passed to the Azure ML Batch Execution Service endpoint. Keys must match the names of web service parameters defined in the published Azure ML web service. Values will be passed in the GlobalParameters property of the Azure ML batch @@ -5822,14 +6019,12 @@ class AzureMlBatchExecutionActivity(ExecutionActivity): Service Outputs to AzureMLWebServiceFile objects specifying the output Blob locations. This information will be passed in the WebServiceOutputs property of the Azure ML batch execution request. - :type web_service_outputs: dict[str, - ~data_factory_management_client.models.AzureMlWebServiceFile] + :type web_service_outputs: dict[str, ~azure.mgmt.datafactory.models.AzureMlWebServiceFile] :param web_service_inputs: Key,Value pairs, mapping the names of Azure ML endpoint's Web Service Inputs to AzureMLWebServiceFile objects specifying the input Blob locations.. This information will be passed in the WebServiceInputs property of the Azure ML batch execution request. - :type web_service_inputs: dict[str, - ~data_factory_management_client.models.AzureMlWebServiceFile] + :type web_service_inputs: dict[str, ~azure.mgmt.datafactory.models.AzureMlWebServiceFile] """ _validation = { @@ -5888,13 +6083,13 @@ class AzureMlExecutePipelineActivity(ExecutionActivity): :param description: Activity description. :type description: str :param depends_on: Activity depends on condition. - :type depends_on: list[~data_factory_management_client.models.ActivityDependency] + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. - :type user_properties: list[~data_factory_management_client.models.UserProperty] + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] :param linked_service_name: Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param policy: Activity policy. - :type policy: ~data_factory_management_client.models.ActivityPolicy + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy :param ml_pipeline_id: ID of the published Azure ML pipeline. Type: string (or Expression with resultType string). :type ml_pipeline_id: object @@ -5995,18 +6190,18 @@ class AzureMlLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. 
- :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param ml_endpoint: Required. The Batch Execution REST URL for an Azure ML Studio Web Service endpoint. Type: string (or Expression with resultType string). :type ml_endpoint: object :param api_key: Required. The API key for accessing the Azure ML model endpoint. - :type api_key: ~data_factory_management_client.models.SecretBase + :type api_key: ~azure.mgmt.datafactory.models.SecretBase :param update_resource_endpoint: The Update Resource REST URL for an Azure ML Studio Web Service endpoint. Type: string (or Expression with resultType string). :type update_resource_endpoint: object @@ -6016,7 +6211,7 @@ class AzureMlLinkedService(LinkedService): :type service_principal_id: object :param service_principal_key: The key of the service principal used to authenticate against the ARM-based updateResourceEndpoint of an Azure ML Studio web service. - :type service_principal_key: ~data_factory_management_client.models.SecretBase + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :param tenant: The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). :type tenant: object @@ -6024,6 +6219,9 @@ class AzureMlLinkedService(LinkedService): encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). :type encrypted_credential: object + :param authentication: Type of authentication (Required to specify MSI) used to connect to + AzureML. Type: string (or Expression with resultType string). + :type authentication: object """ _validation = { @@ -6046,6 +6244,7 @@ class AzureMlLinkedService(LinkedService): 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'authentication': {'key': 'typeProperties.authentication', 'type': 'object'}, } def __init__( @@ -6063,6 +6262,7 @@ def __init__( service_principal_key: Optional["SecretBase"] = None, tenant: Optional[object] = None, encrypted_credential: Optional[object] = None, + authentication: Optional[object] = None, **kwargs ): super(AzureMlLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -6074,6 +6274,7 @@ def __init__( self.service_principal_key = service_principal_key self.tenant = tenant self.encrypted_credential = encrypted_credential + self.authentication = authentication class AzureMlServiceLinkedService(LinkedService): @@ -6087,11 +6288,11 @@ class AzureMlServiceLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. 
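
# --- Editor's illustrative sketch, not part of this diff. AzureMlLinkedService
# gains an `authentication` property which, per its docstring above, is
# required to specify MSI. The endpoint and key values are hypothetical, and
# the class casing follows this generated file (the released SDK may differ).
from azure.mgmt.datafactory.models import AzureMlLinkedService, SecureString

aml_ls = AzureMlLinkedService(
    ml_endpoint="https://example.services.azureml.net/workspaces/0000/services/0000/jobs",
    api_key=SecureString(value="example-api-key"),
    authentication="MSI",
)
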
- :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param subscription_id: Required. Azure ML Service workspace subscription ID. Type: string (or @@ -6109,7 +6310,7 @@ class AzureMlServiceLinkedService(LinkedService): :type service_principal_id: object :param service_principal_key: The key of the service principal used to authenticate against the endpoint of a published Azure ML Service pipeline. - :type service_principal_key: ~data_factory_management_client.models.SecretBase + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :param tenant: The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). :type tenant: object @@ -6185,20 +6386,19 @@ class AzureMlUpdateResourceActivity(ExecutionActivity): :param description: Activity description. :type description: str :param depends_on: Activity depends on condition. - :type depends_on: list[~data_factory_management_client.models.ActivityDependency] + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. - :type user_properties: list[~data_factory_management_client.models.UserProperty] + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] :param linked_service_name: Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param policy: Activity policy. - :type policy: ~data_factory_management_client.models.ActivityPolicy + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy :param trained_model_name: Required. Name of the Trained Model module in the Web Service experiment to be updated. Type: string (or Expression with resultType string). :type trained_model_name: object :param trained_model_linked_service_name: Required. Name of Azure Storage linked service holding the .ilearner file that will be uploaded by the update operation. - :type trained_model_linked_service_name: - ~data_factory_management_client.models.LinkedServiceReference + :type trained_model_linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param trained_model_file_path: Required. The relative file path in trainedModelLinkedService to represent the .ilearner file that will be uploaded by the update operation. Type: string (or Expression with resultType string). @@ -6259,7 +6459,7 @@ class AzureMlWebServiceFile(msrest.serialization.Model): :type file_path: object :param linked_service_name: Required. Reference to an Azure Storage LinkedService, where Azure ML WebService Input/Output file located. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference """ _validation = { @@ -6295,18 +6495,18 @@ class AzureMySqlLinkedService(LinkedService): :param type: Required. 
Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param connection_string: Required. The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. :type connection_string: object :param password: The Azure key vault secret reference of password in connection string. - :type password: ~data_factory_management_client.models.AzureKeyVaultSecretReference + :type password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -6375,6 +6575,9 @@ class AzureMySqlSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param pre_copy_script: A query to execute before starting the copy. Type: string (or Expression with resultType string). :type pre_copy_script: object @@ -6392,6 +6595,7 @@ class AzureMySqlSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } @@ -6404,10 +6608,11 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, pre_copy_script: Optional[object] = None, **kwargs ): - super(AzureMySqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AzureMySqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AzureMySqlSink' # type: str self.pre_copy_script = pre_copy_script @@ -6431,12 +6636,15 @@ class AzureMySqlSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. 
Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: Database query. Type: string (or Expression with resultType string). :type query: object """ @@ -6451,8 +6659,9 @@ class AzureMySqlSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -6463,12 +6672,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, query: Optional[object] = None, **kwargs ): - super(AzureMySqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(AzureMySqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'AzureMySqlSource' # type: str self.query = query @@ -6492,14 +6702,14 @@ class AzureMySqlTableDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. 
If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The Azure MySQL database table name. Type: string (or Expression with resultType string). :type table_name: object @@ -6559,18 +6769,18 @@ class AzurePostgreSqlLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param connection_string: An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. :type connection_string: object :param password: The Azure key vault secret reference of password in connection string. - :type password: ~data_factory_management_client.models.AzureKeyVaultSecretReference + :type password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -6638,6 +6848,9 @@ class AzurePostgreSqlSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param pre_copy_script: A query to execute before starting the copy. Type: string (or Expression with resultType string). 
:type pre_copy_script: object @@ -6655,6 +6868,7 @@ class AzurePostgreSqlSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } @@ -6667,10 +6881,11 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, pre_copy_script: Optional[object] = None, **kwargs ): - super(AzurePostgreSqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AzurePostgreSqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AzurePostgreSqlSink' # type: str self.pre_copy_script = pre_copy_script @@ -6694,12 +6909,15 @@ class AzurePostgreSqlSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
:type query: object @@ -6715,8 +6933,9 @@ class AzurePostgreSqlSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -6727,12 +6946,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, query: Optional[object] = None, **kwargs ): - super(AzurePostgreSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(AzurePostgreSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'AzurePostgreSqlSource' # type: str self.query = query @@ -6756,14 +6976,14 @@ class AzurePostgreSqlTableDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The table name of the Azure PostgreSQL database which includes both schema and table. Type: string (or Expression with resultType string). :type table_name: object @@ -6843,6 +7063,9 @@ class AzureQueueSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). 
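The disableMetricsCollection flag added above recurs on every copy source and sink model in this file. A minimal sketch of how it might be passed through the regenerated models when assembling a copy activity; the activity name, query, and script below are placeholders, not taken from this change:

from azure.mgmt.datafactory.models import (
    AzurePostgreSqlSink,
    AzurePostgreSqlSource,
    CopyActivity,
)

# Both sides of the copy opt out of data store metrics collection.
copy_activity = CopyActivity(
    name="CopyEvents",  # placeholder activity name
    source=AzurePostgreSqlSource(
        query="SELECT * FROM events",  # plain string, or an ADF Expression object
        disable_metrics_collection=True,
    ),
    sink=AzurePostgreSqlSink(
        pre_copy_script="TRUNCATE TABLE events_staging",  # placeholder script
        disable_metrics_collection=True,
    ),
)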
+ :type disable_metrics_collection: object """ _validation = { @@ -6857,6 +7080,7 @@ class AzureQueueSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, } def __init__( @@ -6868,9 +7092,10 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, **kwargs ): - super(AzureQueueSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AzureQueueSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AzureQueueSink' # type: str @@ -6893,14 +7118,14 @@ class AzureSearchIndexDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param index_name: Required. The name of the Azure Search Index. Type: string (or Expression with resultType string). :type index_name: object @@ -6969,10 +7194,12 @@ class AzureSearchIndexSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param write_behavior: Specify the write behavior when upserting documents into Azure Search Index. Possible values include: "Merge", "Upload". 
- :type write_behavior: str or - ~data_factory_management_client.models.AzureSearchIndexWriteBehaviorType + :type write_behavior: str or ~azure.mgmt.datafactory.models.AzureSearchIndexWriteBehaviorType """ _validation = { @@ -6987,6 +7214,7 @@ class AzureSearchIndexSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, } @@ -6999,10 +7227,11 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, write_behavior: Optional[Union[str, "AzureSearchIndexWriteBehaviorType"]] = None, **kwargs ): - super(AzureSearchIndexSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AzureSearchIndexSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AzureSearchIndexSink' # type: str self.write_behavior = write_behavior @@ -7018,18 +7247,18 @@ class AzureSearchLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param url: Required. URL for Azure Search service. Type: string (or Expression with resultType string). :type url: object :param key: Admin Key for Azure Search service. - :type key: ~data_factory_management_client.models.SecretBase + :type key: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -7084,24 +7313,24 @@ class AzureSqlDatabaseLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. 
- :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param connection_string: Required. The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. :type connection_string: object :param password: The Azure key vault secret reference of password in connection string. - :type password: ~data_factory_management_client.models.AzureKeyVaultSecretReference + :type password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param service_principal_id: The ID of the service principal used to authenticate against Azure SQL Database. Type: string (or Expression with resultType string). :type service_principal_id: object :param service_principal_key: The key of the service principal used to authenticate against Azure SQL Database. - :type service_principal_key: ~data_factory_management_client.models.SecretBase + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :param tenant: The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). :type tenant: object @@ -7114,8 +7343,9 @@ class AzureSqlDatabaseLinkedService(LinkedService): resultType string). :type encrypted_credential: object :param always_encrypted_settings: Sql always encrypted properties. - :type always_encrypted_settings: - ~data_factory_management_client.models.SqlAlwaysEncryptedProperties + :type always_encrypted_settings: ~azure.mgmt.datafactory.models.SqlAlwaysEncryptedProperties + :param credential: The credential reference containing authentication information. + :type credential: ~azure.mgmt.datafactory.models.CredentialReference """ _validation = { @@ -7138,6 +7368,7 @@ class AzureSqlDatabaseLinkedService(LinkedService): 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, 'always_encrypted_settings': {'key': 'typeProperties.alwaysEncryptedSettings', 'type': 'SqlAlwaysEncryptedProperties'}, + 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'}, } def __init__( @@ -7156,6 +7387,7 @@ def __init__( azure_cloud_type: Optional[object] = None, encrypted_credential: Optional[object] = None, always_encrypted_settings: Optional["SqlAlwaysEncryptedProperties"] = None, + credential: Optional["CredentialReference"] = None, **kwargs ): super(AzureSqlDatabaseLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -7168,6 +7400,7 @@ def __init__( self.azure_cloud_type = azure_cloud_type self.encrypted_credential = encrypted_credential self.always_encrypted_settings = always_encrypted_settings + self.credential = credential class AzureSqlDwLinkedService(LinkedService): @@ -7181,24 +7414,24 @@ class AzureSqlDwLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. 
:type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param connection_string: Required. The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. :type connection_string: object :param password: The Azure key vault secret reference of password in connection string. - :type password: ~data_factory_management_client.models.AzureKeyVaultSecretReference + :type password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param service_principal_id: The ID of the service principal used to authenticate against Azure SQL Data Warehouse. Type: string (or Expression with resultType string). :type service_principal_id: object :param service_principal_key: The key of the service principal used to authenticate against Azure SQL Data Warehouse. - :type service_principal_key: ~data_factory_management_client.models.SecretBase + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :param tenant: The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). :type tenant: object @@ -7210,6 +7443,8 @@ class AzureSqlDwLinkedService(LinkedService): encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). :type encrypted_credential: object + :param credential: The credential reference containing authentication information. + :type credential: ~azure.mgmt.datafactory.models.CredentialReference """ _validation = { @@ -7231,6 +7466,7 @@ class AzureSqlDwLinkedService(LinkedService): 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'}, } def __init__( @@ -7248,6 +7484,7 @@ def __init__( tenant: Optional[object] = None, azure_cloud_type: Optional[object] = None, encrypted_credential: Optional[object] = None, + credential: Optional["CredentialReference"] = None, **kwargs ): super(AzureSqlDwLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -7259,6 +7496,7 @@ def __init__( self.tenant = tenant self.azure_cloud_type = azure_cloud_type self.encrypted_credential = encrypted_credential + self.credential = credential class AzureSqlDwTableDataset(Dataset): @@ -7280,14 +7518,14 @@ class AzureSqlDwTableDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset.
- :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: This property will be retired. Please consider using schema + table properties instead. :type table_name: object @@ -7353,24 +7591,24 @@ class AzureSqlMiLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param connection_string: Required. The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. :type connection_string: object :param password: The Azure key vault secret reference of password in connection string. - :type password: ~data_factory_management_client.models.AzureKeyVaultSecretReference + :type password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param service_principal_id: The ID of the service principal used to authenticate against Azure SQL Managed Instance. Type: string (or Expression with resultType string). :type service_principal_id: object :param service_principal_key: The key of the service principal used to authenticate against Azure SQL Managed Instance. - :type service_principal_key: ~data_factory_management_client.models.SecretBase + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :param tenant: The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). :type tenant: object @@ -7383,8 +7621,9 @@ class AzureSqlMiLinkedService(LinkedService): resultType string). :type encrypted_credential: object :param always_encrypted_settings: Sql always encrypted properties. - :type always_encrypted_settings: - ~data_factory_management_client.models.SqlAlwaysEncryptedProperties + :type always_encrypted_settings: ~azure.mgmt.datafactory.models.SqlAlwaysEncryptedProperties + :param credential: The credential reference containing authentication information. 
+ :type credential: ~azure.mgmt.datafactory.models.CredentialReference """ _validation = { @@ -7407,6 +7646,7 @@ class AzureSqlMiLinkedService(LinkedService): 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, 'always_encrypted_settings': {'key': 'typeProperties.alwaysEncryptedSettings', 'type': 'SqlAlwaysEncryptedProperties'}, + 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'}, } def __init__( @@ -7425,6 +7665,7 @@ def __init__( azure_cloud_type: Optional[object] = None, encrypted_credential: Optional[object] = None, always_encrypted_settings: Optional["SqlAlwaysEncryptedProperties"] = None, + credential: Optional["CredentialReference"] = None, **kwargs ): super(AzureSqlMiLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -7437,6 +7678,7 @@ def __init__( self.azure_cloud_type = azure_cloud_type self.encrypted_credential = encrypted_credential self.always_encrypted_settings = always_encrypted_settings + self.credential = credential class AzureSqlMiTableDataset(Dataset): @@ -7458,14 +7700,14 @@ class AzureSqlMiTableDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: This property will be retired. Please consider using schema + table properties instead. :type table_name: object @@ -7545,6 +7787,9 @@ class AzureSqlSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or Expression with resultType string). :type sql_writer_stored_procedure_name: object @@ -7556,13 +7801,21 @@ class AzureSqlSink(CopySink): :type pre_copy_script: object :param stored_procedure_parameters: SQL stored procedure parameters. :type stored_procedure_parameters: dict[str, - ~data_factory_management_client.models.StoredProcedureParameter] + ~azure.mgmt.datafactory.models.StoredProcedureParameter] :param stored_procedure_table_type_parameter_name: The stored procedure parameter name of the table type. Type: string (or Expression with resultType string). 
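All three Azure SQL linked services above (Database, DW, and Managed Instance) gain the same optional credential property. A hedged sketch of wiring it up; the connection string and credential name are placeholders, and this assumes the generated CredentialReference model takes the referenced credential's name as reference_name:

from azure.mgmt.datafactory.models import (
    AzureSqlDatabaseLinkedService,
    CredentialReference,
)

linked_service = AzureSqlDatabaseLinkedService(
    # Placeholder connection string; no password is set when a credential is used.
    connection_string="Server=tcp:example.database.windows.net;Database=exampledb;",
    credential=CredentialReference(
        reference_name="exampleManagedIdentityCredential",  # assumed pre-created credential
    ),
)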
:type stored_procedure_table_type_parameter_name: object :param table_option: The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string (or Expression with resultType string). :type table_option: object + :param sql_writer_use_table_lock: Whether to use table lock during bulk copy. Type: boolean (or + Expression with resultType boolean). + :type sql_writer_use_table_lock: object + :param write_behavior: Write behavior when copying data into Azure SQL. Type: + SqlWriteBehaviorEnum (or Expression with resultType SqlWriteBehaviorEnum). + :type write_behavior: object + :param upsert_settings: SQL upsert settings. + :type upsert_settings: ~azure.mgmt.datafactory.models.SqlUpsertSettings """ _validation = { @@ -7577,12 +7830,16 @@ class AzureSqlSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, 'table_option': {'key': 'tableOption', 'type': 'object'}, + 'sql_writer_use_table_lock': {'key': 'sqlWriterUseTableLock', 'type': 'object'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, + 'upsert_settings': {'key': 'upsertSettings', 'type': 'SqlUpsertSettings'}, } def __init__( @@ -7594,15 +7851,19 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, sql_writer_stored_procedure_name: Optional[object] = None, sql_writer_table_type: Optional[object] = None, pre_copy_script: Optional[object] = None, stored_procedure_parameters: Optional[Dict[str, "StoredProcedureParameter"]] = None, stored_procedure_table_type_parameter_name: Optional[object] = None, table_option: Optional[object] = None, + sql_writer_use_table_lock: Optional[object] = None, + write_behavior: Optional[object] = None, + upsert_settings: Optional["SqlUpsertSettings"] = None, **kwargs ): - super(AzureSqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AzureSqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AzureSqlSink' # type: str self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name self.sql_writer_table_type = sql_writer_table_type @@ -7610,6 +7871,9 @@ def __init__( self.stored_procedure_parameters = stored_procedure_parameters 
self.stored_procedure_table_type_parameter_name = stored_procedure_table_type_parameter_name self.table_option = table_option + self.sql_writer_use_table_lock = sql_writer_use_table_lock + self.write_behavior = write_behavior + self.upsert_settings = upsert_settings class AzureSqlSource(TabularSource): @@ -7631,12 +7895,15 @@ class AzureSqlSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects (AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). :type sql_reader_query: object :param sql_reader_stored_procedure_name: Name of the stored procedure for a SQL Database @@ -7646,14 +7913,14 @@ class AzureSqlSource(TabularSource): :param stored_procedure_parameters: Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". :type stored_procedure_parameters: dict[str, - ~data_factory_management_client.models.StoredProcedureParameter] + ~azure.mgmt.datafactory.models.StoredProcedureParameter] :param produce_additional_types: Which additional types to produce. :type produce_additional_types: object :param partition_option: The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". :type partition_option: object :param partition_settings: The settings that will be leveraged for Sql source partitioning.
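The three sink properties introduced above (sqlWriterUseTableLock, writeBehavior, upsertSettings) combine to enable upsert-style copies. A rough sketch, with the SqlUpsertSettings field names assumed from the generated model rather than shown in this hunk:

from azure.mgmt.datafactory.models import AzureSqlSink, SqlUpsertSettings

sink = AzureSqlSink(
    write_behavior="Upsert",  # plain insert is the service default
    upsert_settings=SqlUpsertSettings(
        use_temp_db=True,  # stage rows in tempdb before merging
        keys=["Id"],       # key column(s) used to match existing rows
    ),
    sql_writer_use_table_lock=False,  # table lock mainly benefits bulk inserts
)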
- :type partition_settings: ~data_factory_management_client.models.SqlPartitionSettings + :type partition_settings: ~azure.mgmt.datafactory.models.SqlPartitionSettings """ _validation = { @@ -7666,8 +7933,9 @@ class AzureSqlSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, @@ -7683,8 +7951,9 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, sql_reader_query: Optional[object] = None, sql_reader_stored_procedure_name: Optional[object] = None, stored_procedure_parameters: Optional[Dict[str, "StoredProcedureParameter"]] = None, @@ -7693,7 +7962,7 @@ def __init__( partition_settings: Optional["SqlPartitionSettings"] = None, **kwargs ): - super(AzureSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(AzureSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'AzureSqlSource' # type: str self.sql_reader_query = sql_reader_query self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name @@ -7722,14 +7991,14 @@ class AzureSqlTableDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: This property will be retired. 
Please consider using schema + table properties instead. :type table_name: object @@ -7795,23 +8064,23 @@ class AzureStorageLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param connection_string: The connection string. It is mutually exclusive with sasUri property. Type: string, SecureString or AzureKeyVaultSecretReference. :type connection_string: object :param account_key: The Azure key vault secret reference of accountKey in connection string. - :type account_key: ~data_factory_management_client.models.AzureKeyVaultSecretReference + :type account_key: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param sas_uri: SAS URI of the Azure Storage resource. It is mutually exclusive with connectionString property. Type: string, SecureString or AzureKeyVaultSecretReference. :type sas_uri: object :param sas_token: The Azure key vault secret reference of sasToken in sas uri. - :type sas_token: ~data_factory_management_client.models.AzureKeyVaultSecretReference + :type sas_token: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -7879,14 +8148,14 @@ class AzureTableDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: Required. The table name of the Azure Table storage. Type: string (or Expression with resultType string). :type table_name: object @@ -7955,6 +8224,9 @@ class AzureTableSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). 
+ :type disable_metrics_collection: object :param azure_table_default_partition_key_value: Azure Table default partition key value. Type: string (or Expression with resultType string). :type azure_table_default_partition_key_value: object @@ -7981,6 +8253,7 @@ class AzureTableSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'azure_table_default_partition_key_value': {'key': 'azureTableDefaultPartitionKeyValue', 'type': 'object'}, 'azure_table_partition_key_name': {'key': 'azureTablePartitionKeyName', 'type': 'object'}, 'azure_table_row_key_name': {'key': 'azureTableRowKeyName', 'type': 'object'}, @@ -7996,13 +8269,14 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, azure_table_default_partition_key_value: Optional[object] = None, azure_table_partition_key_name: Optional[object] = None, azure_table_row_key_name: Optional[object] = None, azure_table_insert_type: Optional[object] = None, **kwargs ): - super(AzureTableSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AzureTableSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AzureTableSink' # type: str self.azure_table_default_partition_key_value = azure_table_default_partition_key_value self.azure_table_partition_key_name = azure_table_partition_key_name @@ -8029,12 +8303,15 @@ class AzureTableSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects (AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param azure_table_source_query: Azure Table source query. Type: string (or Expression with resultType string).
:type azure_table_source_query: object @@ -8053,8 +8330,9 @@ class AzureTableSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'azure_table_source_query': {'key': 'azureTableSourceQuery', 'type': 'object'}, 'azure_table_source_ignore_table_not_found': {'key': 'azureTableSourceIgnoreTableNotFound', 'type': 'object'}, } @@ -8066,13 +8344,14 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, azure_table_source_query: Optional[object] = None, azure_table_source_ignore_table_not_found: Optional[object] = None, **kwargs ): - super(AzureTableSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(AzureTableSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'AzureTableSource' # type: str self.azure_table_source_query = azure_table_source_query self.azure_table_source_ignore_table_not_found = azure_table_source_ignore_table_not_found @@ -8089,23 +8368,23 @@ class AzureTableStorageLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param connection_string: The connection string. It is mutually exclusive with sasUri property. Type: string, SecureString or AzureKeyVaultSecretReference. :type connection_string: object :param account_key: The Azure key vault secret reference of accountKey in connection string. - :type account_key: ~data_factory_management_client.models.AzureKeyVaultSecretReference + :type account_key: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param sas_uri: SAS URI of the Azure Storage resource. It is mutually exclusive with connectionString property. 
Type: string, SecureString or AzureKeyVaultSecretReference. :type sas_uri: object :param sas_token: The Azure key vault secret reference of sasToken in sas uri. - :type sas_token: ~data_factory_management_client.models.AzureKeyVaultSecretReference + :type sas_token: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -8173,18 +8452,18 @@ class BinaryDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param location: The location of the Binary storage. - :type location: ~data_factory_management_client.models.DatasetLocation + :type location: ~azure.mgmt.datafactory.models.DatasetLocation :param compression: The data compression method used for the binary dataset. - :type compression: ~data_factory_management_client.models.DatasetCompression + :type compression: ~azure.mgmt.datafactory.models.DatasetCompression """ _validation = { @@ -8277,7 +8556,7 @@ class BinaryReadSettings(FormatReadSettings): :param type: Required. The read setting type.Constant filled by server. :type type: str :param compression_properties: Compression settings. - :type compression_properties: ~data_factory_management_client.models.CompressionReadSettings + :type compression_properties: ~azure.mgmt.datafactory.models.CompressionReadSettings """ _validation = { @@ -8327,8 +8606,11 @@ class BinarySink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param store_settings: Binary store settings. 
- :type store_settings: ~data_factory_management_client.models.StoreWriteSettings + :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings """ _validation = { @@ -8343,6 +8625,7 @@ class BinarySink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, } @@ -8355,10 +8638,11 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, store_settings: Optional["StoreWriteSettings"] = None, **kwargs ): - super(BinarySink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(BinarySink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'BinarySink' # type: str self.store_settings = store_settings @@ -8382,10 +8666,13 @@ class BinarySource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param store_settings: Binary store settings. - :type store_settings: ~data_factory_management_client.models.StoreReadSettings + :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings :param format_settings: Binary format settings. 
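BinarySource composes a store-settings object with optional format settings, and BinaryReadSettings in turn nests the compression settings noted earlier. A sketch reading zipped blobs; the folder and wildcard below are placeholders:

from azure.mgmt.datafactory.models import (
    AzureBlobStorageReadSettings,
    BinaryReadSettings,
    BinarySource,
    ZipDeflateReadSettings,
)

source = BinarySource(
    store_settings=AzureBlobStorageReadSettings(
        recursive=True,
        wildcard_folder_path="input",  # placeholder folder
        wildcard_file_name="*.zip",
    ),
    format_settings=BinaryReadSettings(
        compression_properties=ZipDeflateReadSettings(
            preserve_zip_file_name_as_folder=False,  # flatten extracted files
        ),
    ),
)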
- :type format_settings: ~data_factory_management_client.models.BinaryReadSettings + :type format_settings: ~azure.mgmt.datafactory.models.BinaryReadSettings """ _validation = { @@ -8398,6 +8685,7 @@ class BinarySource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, 'format_settings': {'key': 'formatSettings', 'type': 'BinaryReadSettings'}, } @@ -8409,11 +8697,12 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, store_settings: Optional["StoreReadSettings"] = None, format_settings: Optional["BinaryReadSettings"] = None, **kwargs ): - super(BinarySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(BinarySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'BinarySource' # type: str self.store_settings = store_settings self.format_settings = format_settings @@ -8438,7 +8727,7 @@ class Trigger(msrest.serialization.Model): :type description: str :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". - :vartype runtime_state: str or ~data_factory_management_client.models.TriggerRuntimeState + :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState :param annotations: List of tags that can be used for describing the trigger. :type annotations: list[object] """ @@ -8495,11 +8784,11 @@ class MultiplePipelineTrigger(Trigger): :type description: str :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". - :vartype runtime_state: str or ~data_factory_management_client.models.TriggerRuntimeState + :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState :param annotations: List of tags that can be used for describing the trigger. :type annotations: list[object] :param pipelines: Pipelines that need to be started. - :type pipelines: list[~data_factory_management_client.models.TriggerPipelineReference] + :type pipelines: list[~azure.mgmt.datafactory.models.TriggerPipelineReference] """ _validation = { @@ -8550,11 +8839,11 @@ class BlobEventsTrigger(MultiplePipelineTrigger): :type description: str :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". - :vartype runtime_state: str or ~data_factory_management_client.models.TriggerRuntimeState + :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState :param annotations: List of tags that can be used for describing the trigger. 
:type annotations: list[object] :param pipelines: Pipelines that need to be started. - :type pipelines: list[~data_factory_management_client.models.TriggerPipelineReference] + :type pipelines: list[~azure.mgmt.datafactory.models.TriggerPipelineReference] :param blob_path_begins_with: The blob path must begin with the pattern provided for trigger to fire. For example, '/records/blobs/december/' will only fire the trigger for blobs in the december folder under the records container. At least one of these must be provided: @@ -8567,7 +8856,7 @@ class BlobEventsTrigger(MultiplePipelineTrigger): :param ignore_empty_blobs: If set to true, blobs with zero bytes will be ignored. :type ignore_empty_blobs: bool :param events: Required. The type of events that cause this trigger to fire. - :type events: list[str or ~data_factory_management_client.models.BlobEventTypes] + :type events: list[str or ~azure.mgmt.datafactory.models.BlobEventTypes] :param scope: Required. The ARM resource ID of the Storage Account. :type scope: str """ @@ -8641,6 +8930,9 @@ class BlobSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param blob_writer_overwrite_files: Blob writer overwrite files. Type: boolean (or Expression with resultType boolean). :type blob_writer_overwrite_files: object @@ -8652,6 +8944,9 @@ class BlobSink(CopySink): :type blob_writer_add_header: object :param copy_behavior: The type of copy behavior for copy sink. :type copy_behavior: object + :param metadata: Specify the custom metadata to be added to sink data. Type: array of objects + (or Expression with resultType array of objects). 
+ :type metadata: list[~azure.mgmt.datafactory.models.MetadataItem] """ _validation = { @@ -8666,10 +8961,12 @@ class BlobSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'blob_writer_overwrite_files': {'key': 'blobWriterOverwriteFiles', 'type': 'object'}, 'blob_writer_date_time_format': {'key': 'blobWriterDateTimeFormat', 'type': 'object'}, 'blob_writer_add_header': {'key': 'blobWriterAddHeader', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + 'metadata': {'key': 'metadata', 'type': '[MetadataItem]'}, } def __init__( @@ -8681,18 +8978,21 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, blob_writer_overwrite_files: Optional[object] = None, blob_writer_date_time_format: Optional[object] = None, blob_writer_add_header: Optional[object] = None, copy_behavior: Optional[object] = None, + metadata: Optional[List["MetadataItem"]] = None, **kwargs ): - super(BlobSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(BlobSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'BlobSink' # type: str self.blob_writer_overwrite_files = blob_writer_overwrite_files self.blob_writer_date_time_format = blob_writer_date_time_format self.blob_writer_add_header = blob_writer_add_header self.copy_behavior = copy_behavior + self.metadata = metadata class BlobSource(CopySource): @@ -8714,6 +9014,9 @@ class BlobSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param treat_empty_as_null: Treat empty as null. Type: boolean (or Expression with resultType boolean). 
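The new metadata hook on BlobSink stamps custom properties onto each written blob. Since MetadataItem carries free-form name/value pairs (both typed object in the map above), a value may be a literal or an ADF expression; the pairs below are placeholders:

from azure.mgmt.datafactory.models import BlobSink, MetadataItem

sink = BlobSink(
    copy_behavior="PreserveHierarchy",  # keep the source folder layout
    metadata=[
        MetadataItem(name="ingestedBy", value="adf-copy"),
        MetadataItem(name="batchId", value={"value": "@pipeline().RunId", "type": "Expression"}),
    ],
)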
:type treat_empty_as_null: object @@ -8735,6 +9038,7 @@ class BlobSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'}, 'skip_header_line_count': {'key': 'skipHeaderLineCount', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, @@ -8747,12 +9051,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, treat_empty_as_null: Optional[object] = None, skip_header_line_count: Optional[object] = None, recursive: Optional[object] = None, **kwargs ): - super(BlobSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(BlobSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'BlobSource' # type: str self.treat_empty_as_null = treat_empty_as_null self.skip_header_line_count = skip_header_line_count @@ -8775,18 +9080,18 @@ class BlobTrigger(MultiplePipelineTrigger): :type description: str :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". - :vartype runtime_state: str or ~data_factory_management_client.models.TriggerRuntimeState + :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState :param annotations: List of tags that can be used for describing the trigger. :type annotations: list[object] :param pipelines: Pipelines that need to be started. - :type pipelines: list[~data_factory_management_client.models.TriggerPipelineReference] + :type pipelines: list[~azure.mgmt.datafactory.models.TriggerPipelineReference] :param folder_path: Required. The path of the container/folder that will trigger the pipeline. :type folder_path: str :param max_concurrency: Required. The max number of parallel files to handle when it is triggered. :type max_concurrency: int :param linked_service: Required. The Azure Storage linked service reference. - :type linked_service: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference """ _validation = { @@ -8839,11 +9144,11 @@ class CassandraLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. 
- :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param host: Required. Host name for connection. Type: string (or Expression with resultType @@ -8859,7 +9164,7 @@ class CassandraLinkedService(LinkedService): string). :type username: object :param password: Password for authentication. - :type password: ~data_factory_management_client.models.SecretBase + :type password: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -8931,12 +9236,15 @@ class CassandraSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: Database query. Should be a SQL-92 query expression or Cassandra Query Language (CQL) command. Type: string (or Expression with resultType string). :type query: object @@ -8947,7 +9255,7 @@ class CassandraSource(TabularSource): Possible values include: "ALL", "EACH_QUORUM", "QUORUM", "LOCAL_QUORUM", "ONE", "TWO", "THREE", "LOCAL_ONE", "SERIAL", "LOCAL_SERIAL". 
:type consistency_level: str or - ~data_factory_management_client.models.CassandraSourceReadConsistencyLevels + ~azure.mgmt.datafactory.models.CassandraSourceReadConsistencyLevels """ _validation = { @@ -8960,8 +9268,9 @@ class CassandraSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'consistency_level': {'key': 'consistencyLevel', 'type': 'str'}, } @@ -8973,13 +9282,14 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, query: Optional[object] = None, consistency_level: Optional[Union[str, "CassandraSourceReadConsistencyLevels"]] = None, **kwargs ): - super(CassandraSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(CassandraSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'CassandraSource' # type: str self.query = query self.consistency_level = consistency_level @@ -9004,14 +9314,14 @@ class CassandraTableDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The table name of the Cassandra database. Type: string (or Expression with resultType string). :type table_name: object @@ -9076,14 +9386,14 @@ class ChainingTrigger(Trigger): :type description: str :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". 
- :vartype runtime_state: str or ~data_factory_management_client.models.TriggerRuntimeState + :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState :param annotations: List of tags that can be used for describing the trigger. :type annotations: list[object] :param pipeline: Required. Pipeline for which runs are created when all upstream pipelines complete successfully. - :type pipeline: ~data_factory_management_client.models.TriggerPipelineReference + :type pipeline: ~azure.mgmt.datafactory.models.TriggerPipelineReference :param depends_on: Required. Upstream Pipelines. - :type depends_on: list[~data_factory_management_client.models.PipelineReference] + :type depends_on: list[~azure.mgmt.datafactory.models.PipelineReference] :param run_dimension: Required. Run Dimension property that needs to be emitted by upstream pipelines. :type run_dimension: str @@ -9138,7 +9448,7 @@ class CloudError(msrest.serialization.Model): :param target: Property name/path in request associated with error. :type target: str :param details: Array with additional error details. - :type details: list[~data_factory_management_client.models.CloudError] + :type details: list[~azure.mgmt.datafactory.models.CloudError] """ _validation = { @@ -9181,7 +9491,7 @@ class CmdkeySetup(CustomSetupBase): :param user_name: Required. The user name of data source access. :type user_name: object :param password: Required. The password of data source access. - :type password: ~data_factory_management_client.models.SecretBase + :type password: ~azure.mgmt.datafactory.models.SecretBase """ _validation = { @@ -9254,14 +9564,14 @@ class CommonDataServiceForAppsEntityDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param entity_name: The logical name of the entity. Type: string (or Expression with resultType string). :type entity_name: object @@ -9315,18 +9625,18 @@ class CommonDataServiceForAppsLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. 
:type annotations: list[object] :param deployment_type: Required. The deployment type of the Common Data Service for Apps instance. 'Online' for Common Data Service for Apps Online and 'OnPremisesWithIfd' for Common Data Service for Apps on-premises with Ifd. Type: string (or Expression with resultType - string). Possible values include: "Online", "OnPremisesWithIfd". - :type deployment_type: str or ~data_factory_management_client.models.DynamicsDeploymentType + string). + :type deployment_type: object :param host_name: The host name of the on-premises Common Data Service for Apps server. The property is required for on-prem and not allowed for online. Type: string (or Expression with resultType string). @@ -9347,30 +9657,26 @@ class CommonDataServiceForAppsLinkedService(LinkedService): :param authentication_type: Required. The authentication type to connect to Common Data Service for Apps server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario. 'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: string (or - Expression with resultType string). Possible values include: "Office365", "Ifd", - "AADServicePrincipal". - :type authentication_type: str or - ~data_factory_management_client.models.DynamicsAuthenticationType + Expression with resultType string). + :type authentication_type: object :param username: User name to access the Common Data Service for Apps instance. Type: string (or Expression with resultType string). :type username: object :param password: Password to access the Common Data Service for Apps instance. - :type password: ~data_factory_management_client.models.SecretBase + :type password: ~azure.mgmt.datafactory.models.SecretBase :param service_principal_id: The client ID of the application in Azure Active Directory used for Server-To-Server authentication. Type: string (or Expression with resultType string). :type service_principal_id: object :param service_principal_credential_type: The service principal credential type to use in Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' - for certificate. Type: string (or Expression with resultType string). Possible values include: - "ServicePrincipalKey", "ServicePrincipalCert". - :type service_principal_credential_type: str or - ~data_factory_management_client.models.DynamicsServicePrincipalCredentialType + for certificate. Type: string (or Expression with resultType string). + :type service_principal_credential_type: object :param service_principal_credential: The credential of the service principal object in Azure Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only be AzureKeyVaultSecretReference. - :type service_principal_credential: ~data_factory_management_client.models.SecretBase + :type service_principal_credential: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
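The hunks above and below relax deploymentType, authenticationType and servicePrincipalCredentialType from server-defined enums to plain objects, so these properties can now carry ADF expressions as well as literal strings. A minimal sketch of what that enables, as reviewer illustration only (not part of this diff); it assumes the models are importable as azure.mgmt.datafactory.models, matching the updated docstring references:

    from azure.mgmt.datafactory.models import (
        CommonDataServiceForAppsLinkedService,
        SecureString,
    )

    # Literal strings keep working exactly as before.
    ls = CommonDataServiceForAppsLinkedService(
        deployment_type='Online',
        authentication_type='Office365',
        service_uri='https://contoso.crm.dynamics.com',  # hypothetical endpoint
        username='user@contoso.com',                     # hypothetical user
        password=SecureString(value='<placeholder>'),
    )

    # An Expression-shaped dict is now also accepted; the old 'str' attribute
    # mapping would have flattened such a value into its string repr on the wire.
    ls_expr = CommonDataServiceForAppsLinkedService(
        deployment_type={'type': 'Expression', 'value': "@linkedService().deploymentType"},
        authentication_type='Office365',
    )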
@@ -9390,16 +9696,16 @@ class CommonDataServiceForAppsLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'}, + 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'object'}, 'host_name': {'key': 'typeProperties.hostName', 'type': 'object'}, 'port': {'key': 'typeProperties.port', 'type': 'object'}, 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'}, 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, 'username': {'key': 'typeProperties.username', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'str'}, + 'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'object'}, 'service_principal_credential': {'key': 'typeProperties.servicePrincipalCredential', 'type': 'SecretBase'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -9407,8 +9713,8 @@ class CommonDataServiceForAppsLinkedService(LinkedService): def __init__( self, *, - deployment_type: Union[str, "DynamicsDeploymentType"], - authentication_type: Union[str, "DynamicsAuthenticationType"], + deployment_type: object, + authentication_type: object, additional_properties: Optional[Dict[str, object]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, @@ -9421,7 +9727,7 @@ def __init__( username: Optional[object] = None, password: Optional["SecretBase"] = None, service_principal_id: Optional[object] = None, - service_principal_credential_type: Optional[Union[str, "DynamicsServicePrincipalCredentialType"]] = None, + service_principal_credential_type: Optional[object] = None, service_principal_credential: Optional["SecretBase"] = None, encrypted_credential: Optional[object] = None, **kwargs @@ -9467,9 +9773,12 @@ class CommonDataServiceForAppsSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param write_behavior: Required. The write behavior for the operation. Possible values include: "Upsert". - :type write_behavior: str or ~data_factory_management_client.models.DynamicsSinkWriteBehavior + :type write_behavior: str or ~azure.mgmt.datafactory.models.DynamicsSinkWriteBehavior :param ignore_null_values: The flag indicating whether to ignore null values from input dataset (except key fields) during write operation. Default is false. Type: boolean (or Expression with resultType boolean). 
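Every copy source and sink touched by this change follows the same mechanical pattern: declare disable_metrics_collection, map it to the disableMetricsCollection wire name, and forward it to the CopySink/CopySource base through super().__init__, as the CommonDataServiceForAppsSink hunks here do. A quick round-trip sketch, again assuming the azure.mgmt.datafactory.models import path:

    from azure.mgmt.datafactory.models import CommonDataServiceForAppsSink

    sink = CommonDataServiceForAppsSink(
        write_behavior='Upsert',          # the only required property
        disable_metrics_collection=True,  # new flag, stored on the CopySink base
    )

    # msrest serializes through the merged attribute maps, so the flag comes
    # out under its REST wire name next to the polymorphic discriminator.
    body = sink.serialize()
    assert body['type'] == 'CommonDataServiceForAppsSink'
    assert body['writeBehavior'] == 'Upsert'
    assert body['disableMetricsCollection'] is True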
@@ -9492,6 +9801,7 @@ class CommonDataServiceForAppsSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, 'alternate_key_name': {'key': 'alternateKeyName', 'type': 'object'}, @@ -9507,11 +9817,12 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, ignore_null_values: Optional[object] = None, alternate_key_name: Optional[object] = None, **kwargs ): - super(CommonDataServiceForAppsSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(CommonDataServiceForAppsSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'CommonDataServiceForAppsSink' # type: str self.write_behavior = write_behavior self.ignore_null_values = ignore_null_values @@ -9537,12 +9848,15 @@ class CommonDataServiceForAppsSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query: FetchXML is a proprietary query language that is used in Microsoft Common Data Service for Apps (online & on-premises). Type: string (or Expression with resultType string). :type query: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
+ :type additional_columns: object """ _validation = { @@ -9555,8 +9869,9 @@ class CommonDataServiceForAppsSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -9566,11 +9881,12 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, **kwargs ): - super(CommonDataServiceForAppsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(CommonDataServiceForAppsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'CommonDataServiceForAppsSource' # type: str self.query = query self.additional_columns = additional_columns @@ -9586,7 +9902,7 @@ class ComponentSetup(CustomSetupBase): :param component_name: Required. The name of the 3rd party component. :type component_name: str :param license_key: The license key to activate the component. - :type license_key: ~data_factory_management_client.models.SecretBase + :type license_key: ~azure.mgmt.datafactory.models.SecretBase """ _validation = { @@ -9663,11 +9979,11 @@ class ConcurLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param connection_properties: Properties used to connect to Concur. It is mutually exclusive @@ -9679,7 +9995,7 @@ class ConcurLinkedService(LinkedService): :type username: object :param password: The password corresponding to the user name that you provided in the username field. - :type password: ~data_factory_management_client.models.SecretBase + :type password: ~azure.mgmt.datafactory.models.SecretBase :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. 
:type use_encrypted_endpoints: object @@ -9768,14 +10084,14 @@ class ConcurObjectDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The table name. Type: string (or Expression with resultType string). :type table_name: object """ @@ -9836,12 +10152,15 @@ class ConcurSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
:type query: object @@ -9857,8 +10176,9 @@ class ConcurSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -9869,12 +10189,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, query: Optional[object] = None, **kwargs ): - super(ConcurSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(ConcurSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'ConcurSource' # type: str self.query = query @@ -9929,9 +10250,9 @@ class ControlActivity(Activity): :param description: Activity description. :type description: str :param depends_on: Activity depends on condition. - :type depends_on: list[~data_factory_management_client.models.ActivityDependency] + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. - :type user_properties: list[~data_factory_management_client.models.UserProperty] + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] """ _validation = { @@ -9977,28 +10298,28 @@ class CopyActivity(ExecutionActivity): :param description: Activity description. :type description: str :param depends_on: Activity depends on condition. - :type depends_on: list[~data_factory_management_client.models.ActivityDependency] + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. - :type user_properties: list[~data_factory_management_client.models.UserProperty] + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] :param linked_service_name: Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param policy: Activity policy. - :type policy: ~data_factory_management_client.models.ActivityPolicy + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy :param inputs: List of inputs for the activity. - :type inputs: list[~data_factory_management_client.models.DatasetReference] + :type inputs: list[~azure.mgmt.datafactory.models.DatasetReference] :param outputs: List of outputs for the activity. 
- :type outputs: list[~data_factory_management_client.models.DatasetReference] + :type outputs: list[~azure.mgmt.datafactory.models.DatasetReference] :param source: Required. Copy activity source. - :type source: ~data_factory_management_client.models.CopySource + :type source: ~azure.mgmt.datafactory.models.CopySource :param sink: Required. Copy activity sink. - :type sink: ~data_factory_management_client.models.CopySink + :type sink: ~azure.mgmt.datafactory.models.CopySink :param translator: Copy activity translator. If not specified, tabular translator is used. :type translator: object :param enable_staging: Specifies whether to copy data via an interim staging. Default value is false. Type: boolean (or Expression with resultType boolean). :type enable_staging: object :param staging_settings: Specifies interim staging settings when EnableStaging is true. - :type staging_settings: ~data_factory_management_client.models.StagingSettings + :type staging_settings: ~azure.mgmt.datafactory.models.StagingSettings :param parallel_copies: Maximum number of concurrent sessions opened on the source or sink to avoid overloading the data store. Type: integer (or Expression with resultType integer), minimum: 0. @@ -10012,12 +10333,12 @@ class CopyActivity(ExecutionActivity): :param redirect_incompatible_row_settings: Redirect incompatible row settings when EnableSkipIncompatibleRow is true. :type redirect_incompatible_row_settings: - ~data_factory_management_client.models.RedirectIncompatibleRowSettings + ~azure.mgmt.datafactory.models.RedirectIncompatibleRowSettings :param log_storage_settings: (Deprecated. Please use LogSettings) Log storage settings customer need to provide when enabling session log. - :type log_storage_settings: ~data_factory_management_client.models.LogStorageSettings + :type log_storage_settings: ~azure.mgmt.datafactory.models.LogStorageSettings :param log_settings: Log settings customer needs provide when enabling log. - :type log_settings: ~data_factory_management_client.models.LogSettings + :type log_settings: ~azure.mgmt.datafactory.models.LogSettings :param preserve_rules: Preserve Rules. :type preserve_rules: list[object] :param preserve: Preserve rules. @@ -10026,7 +10347,7 @@ class CopyActivity(ExecutionActivity): (or Expression with resultType boolean). :type validate_data_consistency: object :param skip_error_file: Specify the fault tolerance for data consistency. - :type skip_error_file: ~data_factory_management_client.models.SkipErrorFile + :type skip_error_file: ~azure.mgmt.datafactory.models.SkipErrorFile """ _validation = { @@ -10192,11 +10513,11 @@ class CosmosDbLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param connection_string: The connection string. 
Type: string, SecureString or @@ -10209,7 +10530,7 @@ class CosmosDbLinkedService(LinkedService): :type database: object :param account_key: The account key of the Azure CosmosDB account. Type: SecureString or AzureKeyVaultSecretReference. - :type account_key: ~data_factory_management_client.models.SecretBase + :type account_key: ~azure.mgmt.datafactory.models.SecretBase :param service_principal_id: The client ID of the application in Azure Active Directory used for Server-To-Server authentication. Type: string (or Expression with resultType string). :type service_principal_id: object @@ -10218,13 +10539,13 @@ class CosmosDbLinkedService(LinkedService): for certificate. Type: string (or Expression with resultType string). Possible values include: "ServicePrincipalKey", "ServicePrincipalCert". :type service_principal_credential_type: str or - ~data_factory_management_client.models.CosmosDbServicePrincipalCredentialType + ~azure.mgmt.datafactory.models.CosmosDbServicePrincipalCredentialType :param service_principal_credential: The credential of the service principal object in Azure Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only be AzureKeyVaultSecretReference. - :type service_principal_credential: ~data_factory_management_client.models.SecretBase + :type service_principal_credential: ~azure.mgmt.datafactory.models.SecretBase :param tenant: The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). :type tenant: object @@ -10234,7 +10555,7 @@ class CosmosDbLinkedService(LinkedService): :type azure_cloud_type: object :param connection_mode: The connection mode used to access CosmosDB account. Type: string (or Expression with resultType string). Possible values include: "Gateway", "Direct". - :type connection_mode: str or ~data_factory_management_client.models.CosmosDbConnectionMode + :type connection_mode: str or ~azure.mgmt.datafactory.models.CosmosDbConnectionMode :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -10320,14 +10641,14 @@ class CosmosDbMongoDbApiCollectionDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param collection: Required. The collection name of the CosmosDB (MongoDB API) database. Type: string (or Expression with resultType string). 
:type collection: object @@ -10382,13 +10703,16 @@ class CosmosDbMongoDbApiLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] + :param is_server_version_above32: Whether the CosmosDB (MongoDB API) server version is higher + than 3.2. The default value is false. Type: boolean (or Expression with resultType boolean). + :type is_server_version_above32: object :param connection_string: Required. The CosmosDB (MongoDB API) connection string. Type: string, SecureString or AzureKeyVaultSecretReference. Type: string, SecureString or AzureKeyVaultSecretReference. @@ -10411,6 +10735,7 @@ class CosmosDbMongoDbApiLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'is_server_version_above32': {'key': 'typeProperties.isServerVersionAbove32', 'type': 'object'}, 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, 'database': {'key': 'typeProperties.database', 'type': 'object'}, } @@ -10425,10 +10750,12 @@ def __init__( description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, + is_server_version_above32: Optional[object] = None, **kwargs ): super(CosmosDbMongoDbApiLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'CosmosDbMongoDbApi' # type: str + self.is_server_version_above32 = is_server_version_above32 self.connection_string = connection_string self.database = database @@ -10458,6 +10785,9 @@ class CosmosDbMongoDbApiSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param write_behavior: Specifies whether the document with same key to be overwritten (upsert) rather than throw exception (insert). The default value is "insert". Type: string (or Expression with resultType string). Type: string (or Expression with resultType string). 
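The isServerVersionAbove32 property introduced above defaults to false on the service side, so only MongoDB-API accounts newer than 3.2 need to set it. A hedged construction sketch (illustration only; the connection values are hypothetical, and the import path again assumes azure.mgmt.datafactory.models):

    from azure.mgmt.datafactory.models import CosmosDbMongoDbApiLinkedService

    ls = CosmosDbMongoDbApiLinkedService(
        connection_string='mongodb://contoso:<key>@contoso.mongo.cosmos.azure.com:10255/',
        database='ordersdb',
        is_server_version_above32=True,  # new in this change; omit for <= 3.2 servers
    )

    # The flag is flattened under typeProperties with the rest of the
    # connection info when msrest serializes the dotted attribute keys.
    assert ls.serialize()['typeProperties']['isServerVersionAbove32'] is True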
@@ -10476,6 +10806,7 @@ class CosmosDbMongoDbApiSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, } @@ -10488,10 +10819,11 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, write_behavior: Optional[object] = None, **kwargs ): - super(CosmosDbMongoDbApiSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(CosmosDbMongoDbApiSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'CosmosDbMongoDbApiSink' # type: str self.write_behavior = write_behavior @@ -10515,12 +10847,15 @@ class CosmosDbMongoDbApiSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param filter: Specifies selection filter using query operators. To return all documents in a collection, omit this parameter or pass an empty document ({}). Type: string (or Expression with resultType string). :type filter: object :param cursor_methods: Cursor methods for Mongodb query. - :type cursor_methods: ~data_factory_management_client.models.MongoDbCursorMethodsProperties + :type cursor_methods: ~azure.mgmt.datafactory.models.MongoDbCursorMethodsProperties :param batch_size: Specifies the number of documents to return in each batch of the response from MongoDB instance. In most cases, modifying the batch size will not affect the user or the application. This property's main purpose is to avoid hit the limitation of response size. @@ -10530,8 +10865,8 @@ class CosmosDbMongoDbApiSource(CopySource): pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
+ :type additional_columns: object """ _validation = { @@ -10544,11 +10879,12 @@ class CosmosDbMongoDbApiSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'filter': {'key': 'filter', 'type': 'object'}, 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'}, 'batch_size': {'key': 'batchSize', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -10558,14 +10894,15 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, filter: Optional[object] = None, cursor_methods: Optional["MongoDbCursorMethodsProperties"] = None, batch_size: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, **kwargs ): - super(CosmosDbMongoDbApiSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(CosmosDbMongoDbApiSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'CosmosDbMongoDbApiSource' # type: str self.filter = filter self.cursor_methods = cursor_methods @@ -10593,14 +10930,14 @@ class CosmosDbSqlApiCollectionDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param collection_name: Required. CosmosDB (SQL API) collection name. Type: string (or Expression with resultType string). :type collection_name: object @@ -10669,6 +11006,9 @@ class CosmosDbSqlApiSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. 
Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param write_behavior: Describes how to write data to Azure Cosmos DB. Type: string (or Expression with resultType string). Allowed values: insert and upsert. :type write_behavior: object @@ -10686,6 +11026,7 @@ class CosmosDbSqlApiSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, } @@ -10698,10 +11039,11 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, write_behavior: Optional[object] = None, **kwargs ): - super(CosmosDbSqlApiSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(CosmosDbSqlApiSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'CosmosDbSqlApiSink' # type: str self.write_behavior = write_behavior @@ -10725,6 +11067,9 @@ class CosmosDbSqlApiSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query: SQL API query. Type: string (or Expression with resultType string). :type query: object :param page_size: Page size of the result. Type: integer (or Expression with resultType @@ -10737,8 +11082,8 @@ class CosmosDbSqlApiSource(CopySource): Expression with resultType boolean). :type detect_datetime: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
+ :type additional_columns: object """ _validation = { @@ -10751,11 +11096,12 @@ class CosmosDbSqlApiSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'page_size': {'key': 'pageSize', 'type': 'object'}, 'preferred_regions': {'key': 'preferredRegions', 'type': 'object'}, 'detect_datetime': {'key': 'detectDatetime', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -10765,14 +11111,15 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query: Optional[object] = None, page_size: Optional[object] = None, preferred_regions: Optional[object] = None, detect_datetime: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, **kwargs ): - super(CosmosDbSqlApiSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(CosmosDbSqlApiSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'CosmosDbSqlApiSource' # type: str self.query = query self.page_size = page_size @@ -10792,18 +11139,18 @@ class CouchbaseLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param connection_string: An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. :type connection_string: object :param cred_string: The Azure key vault secret reference of credString in connection string. - :type cred_string: ~data_factory_management_client.models.AzureKeyVaultSecretReference + :type cred_string: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -10865,12 +11212,15 @@ class CouchbaseSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. 
Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -10886,8 +11236,9 @@ class CouchbaseSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -10898,12 +11249,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, query: Optional[object] = None, **kwargs ): - super(CouchbaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(CouchbaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'CouchbaseSource' # type: str self.query = query @@ -10927,14 +11279,14 @@ class CouchbaseTableDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. 
If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The table name. Type: string (or Expression with resultType string). :type table_name: object """ @@ -10988,8 +11340,7 @@ class CreateDataFlowDebugSessionRequest(msrest.serialization.Model): :param time_to_live: Time to live setting of the cluster in minutes. :type time_to_live: int :param integration_runtime: Set to use integration runtime setting for data flow debug session. - :type integration_runtime: - ~data_factory_management_client.models.IntegrationRuntimeDebugResource + :type integration_runtime: ~azure.mgmt.datafactory.models.IntegrationRuntimeDebugResource """ _attribute_map = { @@ -11107,6 +11458,181 @@ def __init__( self.run_id = run_id +class Credential(msrest.serialization.Model): + """The Azure Data Factory nested object which contains the information and credential which can be used to connect with related store or compute resource. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: ManagedIdentityCredential, ServicePrincipalCredential. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of credential.Constant filled by server. + :type type: str + :param description: Credential description. + :type description: str + :param annotations: List of tags that can be used for describing the Credential. + :type annotations: list[object] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + } + + _subtype_map = { + 'type': {'ManagedIdentity': 'ManagedIdentityCredential', 'ServicePrincipal': 'ServicePrincipalCredential'} + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + annotations: Optional[List[object]] = None, + **kwargs + ): + super(Credential, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = 'Credential' # type: str + self.description = description + self.annotations = annotations + + +class CredentialReference(msrest.serialization.Model): + """Credential reference type. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :ivar type: Required. Credential reference type. Default value: "CredentialReference". + :vartype type: str + :param reference_name: Required. Reference credential name. 
+ :type reference_name: str + """ + + _validation = { + 'type': {'required': True, 'constant': True}, + 'reference_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + } + + type = "CredentialReference" + + def __init__( + self, + *, + reference_name: str, + additional_properties: Optional[Dict[str, object]] = None, + **kwargs + ): + super(CredentialReference, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.reference_name = reference_name + + +class SubResource(msrest.serialization.Model): + """Azure Data Factory nested resource, which belongs to a factory. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :ivar etag: Etag identifies change in the resource. + :vartype etag: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(SubResource, self).__init__(**kwargs) + self.id = None + self.name = None + self.type = None + self.etag = None + + +class CredentialResource(SubResource): + """Credential resource type. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :ivar etag: Etag identifies change in the resource. + :vartype etag: str + :param properties: Required. Properties of credentials. + :type properties: ~azure.mgmt.datafactory.models.Credential + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'Credential'}, + } + + def __init__( + self, + *, + properties: "Credential", + **kwargs + ): + super(CredentialResource, self).__init__(**kwargs) + self.properties = properties + + class CustomActivity(ExecutionActivity): """Custom activity type. @@ -11122,23 +11648,23 @@ class CustomActivity(ExecutionActivity): :param description: Activity description. :type description: str :param depends_on: Activity depends on condition. - :type depends_on: list[~data_factory_management_client.models.ActivityDependency] + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. - :type user_properties: list[~data_factory_management_client.models.UserProperty] + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] :param linked_service_name: Linked service reference. 
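# --- Illustrative sketch (hedged) ---------------------------------------------
# Minimal use of the new credential models introduced above. The
# CredentialReference and CredentialResource constructors are shown in this
# hunk; ManagedIdentityCredential's constructor is not, so only the
# Credential base-class parameters are assumed for it here.
from azure.mgmt.datafactory.models import (
    CredentialReference,
    CredentialResource,
    ManagedIdentityCredential,
)

# `type` is a class-level constant, validated as required + constant.
ref = CredentialReference(reference_name="myUserAssignedIdentity")
assert ref.type == "CredentialReference"

# CredentialResource wraps a polymorphic Credential; ManagedIdentity and
# ServicePrincipal are the subtypes registered in _subtype_map.
resource = CredentialResource(
    properties=ManagedIdentityCredential(description="user-assigned identity credential")
)
# ------------------------------------------------------------------------------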
- :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param policy: Activity policy. - :type policy: ~data_factory_management_client.models.ActivityPolicy + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy :param command: Required. Command for custom activity Type: string (or Expression with resultType string). :type command: object :param resource_linked_service: Resource linked service reference. - :type resource_linked_service: ~data_factory_management_client.models.LinkedServiceReference + :type resource_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference :param folder_path: Folder path for resource files Type: string (or Expression with resultType string). :type folder_path: object :param reference_objects: Reference objects. - :type reference_objects: ~data_factory_management_client.models.CustomActivityReferenceObject + :type reference_objects: ~azure.mgmt.datafactory.models.CustomActivityReferenceObject :param extended_properties: User defined property bag. There is no restriction on the keys or values that can be used. The user specified custom activity has the full responsibility to consume and interpret the content defined. @@ -11209,9 +11735,9 @@ class CustomActivityReferenceObject(msrest.serialization.Model): """Reference objects for custom activity. :param linked_services: Linked service references. - :type linked_services: list[~data_factory_management_client.models.LinkedServiceReference] + :type linked_services: list[~azure.mgmt.datafactory.models.LinkedServiceReference] :param datasets: Dataset references. - :type datasets: list[~data_factory_management_client.models.DatasetReference] + :type datasets: list[~azure.mgmt.datafactory.models.DatasetReference] """ _attribute_map = { @@ -11250,14 +11776,14 @@ class CustomDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type_properties: Custom dataset properties. :type type_properties: object """ @@ -11310,11 +11836,11 @@ class CustomDataSourceLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. 
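# --- Illustrative sketch (hedged) ---------------------------------------------
# A CustomActivity per the docstring above: `name` and `command` are required,
# and `extended_properties` is a free-form bag the activity must interpret
# itself. The constructor call and LinkedServiceReference(reference_name=...)
# are assumed from the reference types used throughout these models, not shown
# in this hunk.
from azure.mgmt.datafactory.models import CustomActivity, LinkedServiceReference

custom_activity = CustomActivity(
    name="RunLegacyExe",
    command="cmd /c process.exe --input data.csv",
    linked_service_name=LinkedServiceReference(reference_name="AzureBatchLinkedService"),
    extended_properties={"owner": "data-team"},  # no restriction on keys or values
)
# ------------------------------------------------------------------------------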
- :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param type_properties: Required. Custom linked service properties. @@ -11368,11 +11894,11 @@ class CustomEventsTrigger(MultiplePipelineTrigger): :type description: str :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". - :vartype runtime_state: str or ~data_factory_management_client.models.TriggerRuntimeState + :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState :param annotations: List of tags that can be used for describing the trigger. :type annotations: list[object] :param pipelines: Pipelines that need to be started. - :type pipelines: list[~data_factory_management_client.models.TriggerPipelineReference] + :type pipelines: list[~azure.mgmt.datafactory.models.TriggerPipelineReference] :param subject_begins_with: The event subject must begin with the pattern provided for trigger to fire. At least one of these must be provided: subjectBeginsWith, subjectEndsWith. :type subject_begins_with: str @@ -11441,13 +11967,13 @@ class DatabricksNotebookActivity(ExecutionActivity): :param description: Activity description. :type description: str :param depends_on: Activity depends on condition. - :type depends_on: list[~data_factory_management_client.models.ActivityDependency] + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. - :type user_properties: list[~data_factory_management_client.models.UserProperty] + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] :param linked_service_name: Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param policy: Activity policy. - :type policy: ~data_factory_management_client.models.ActivityPolicy + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy :param notebook_path: Required. The absolute path of the notebook to be run in the Databricks Workspace. This path must begin with a slash. Type: string (or Expression with resultType string). @@ -11516,13 +12042,13 @@ class DatabricksSparkJarActivity(ExecutionActivity): :param description: Activity description. :type description: str :param depends_on: Activity depends on condition. - :type depends_on: list[~data_factory_management_client.models.ActivityDependency] + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. - :type user_properties: list[~data_factory_management_client.models.UserProperty] + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] :param linked_service_name: Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param policy: Activity policy. - :type policy: ~data_factory_management_client.models.ActivityPolicy + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy :param main_class_name: Required. 
The full name of the class containing the main method to be executed. This class must be contained in a JAR provided as a library. Type: string (or Expression with resultType string). @@ -11590,13 +12116,13 @@ class DatabricksSparkPythonActivity(ExecutionActivity): :param description: Activity description. :type description: str :param depends_on: Activity depends on condition. - :type depends_on: list[~data_factory_management_client.models.ActivityDependency] + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. - :type user_properties: list[~data_factory_management_client.models.UserProperty] + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] :param linked_service_name: Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param policy: Activity policy. - :type policy: ~data_factory_management_client.models.ActivityPolicy + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy :param python_file: Required. The URI of the Python file to be executed. DBFS paths are supported. Type: string (or Expression with resultType string). :type python_file: object @@ -11654,7 +12180,9 @@ class DataFlow(msrest.serialization.Model): You probably want to use the sub-classes and not this class directly. Known sub-classes are: MappingDataFlow. - :param type: Type of data flow.Constant filled by server. + All required parameters must be populated in order to send to Azure. + + :param type: Required. Type of data flow.Constant filled by server. :type type: str :param description: The description of the data flow. :type description: str @@ -11662,9 +12190,13 @@ class DataFlow(msrest.serialization.Model): :type annotations: list[object] :param folder: The folder that this data flow is in. If not specified, Data flow will appear at the root level. - :type folder: ~data_factory_management_client.models.DataFlowFolder + :type folder: ~azure.mgmt.datafactory.models.DataFlowFolder """ + _validation = { + 'type': {'required': True}, + } + _attribute_map = { 'type': {'key': 'type', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, @@ -11740,9 +12272,9 @@ class DataFlowDebugCommandRequest(msrest.serialization.Model): :type session_id: str :param command: The command type. Possible values include: "executePreviewQuery", "executeStatisticsQuery", "executeExpressionQuery". - :type command: str or ~data_factory_management_client.models.DataFlowDebugCommandType + :type command: str or ~azure.mgmt.datafactory.models.DataFlowDebugCommandType :param command_payload: The command payload object. - :type command_payload: ~data_factory_management_client.models.DataFlowDebugCommandPayload + :type command_payload: ~azure.mgmt.datafactory.models.DataFlowDebugCommandPayload """ _attribute_map = { @@ -11800,15 +12332,15 @@ class DataFlowDebugPackage(msrest.serialization.Model): :param session_id: The ID of data flow debug session. :type session_id: str :param data_flow: Data flow instance. - :type data_flow: ~data_factory_management_client.models.DataFlowDebugResource + :type data_flow: ~azure.mgmt.datafactory.models.DataFlowDebugResource :param datasets: List of datasets. 
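# --- Illustrative sketch (hedged) ---------------------------------------------
# A DatabricksSparkPythonActivity as documented above: `name` and `python_file`
# are required, and DBFS paths are supported. The constructor call and the
# linked-service reference shape are assumed, as this hunk only retargets the
# docstring namespaces.
from azure.mgmt.datafactory.models import (
    DatabricksSparkPythonActivity,
    LinkedServiceReference,
)

spark_python = DatabricksSparkPythonActivity(
    name="RunPySparkJob",
    python_file="dbfs:/jobs/etl/main.py",
    linked_service_name=LinkedServiceReference(reference_name="AzureDatabricksLS"),
)
# ------------------------------------------------------------------------------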
- :type datasets: list[~data_factory_management_client.models.DatasetDebugResource] + :type datasets: list[~azure.mgmt.datafactory.models.DatasetDebugResource] :param linked_services: List of linked services. - :type linked_services: list[~data_factory_management_client.models.LinkedServiceDebugResource] + :type linked_services: list[~azure.mgmt.datafactory.models.LinkedServiceDebugResource] :param staging: Staging info for debug session. - :type staging: ~data_factory_management_client.models.DataFlowStagingInfo + :type staging: ~azure.mgmt.datafactory.models.DataFlowStagingInfo :param debug_settings: Data flow debug settings. - :type debug_settings: ~data_factory_management_client.models.DataFlowDebugPackageDebugSettings + :type debug_settings: ~azure.mgmt.datafactory.models.DataFlowDebugPackageDebugSettings """ _attribute_map = { @@ -11847,7 +12379,7 @@ class DataFlowDebugPackageDebugSettings(msrest.serialization.Model): """Data flow debug settings. :param source_settings: Source setting for data flow debug. - :type source_settings: list[~data_factory_management_client.models.DataFlowSourceSetting] + :type source_settings: list[~azure.mgmt.datafactory.models.DataFlowSourceSetting] :param parameters: Data flow parameters. :type parameters: dict[str, object] :param dataset_parameters: Parameters for dataset. @@ -11903,7 +12435,7 @@ class DataFlowDebugResource(SubResourceDebugResource): :param name: The resource name. :type name: str :param properties: Required. Data flow properties. - :type properties: ~data_factory_management_client.models.DataFlow + :type properties: ~azure.mgmt.datafactory.models.DataFlow """ _validation = { @@ -12020,7 +12552,7 @@ class DataFlowListResponse(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. :param value: Required. List of data flows. - :type value: list[~data_factory_management_client.models.DataFlowResource] + :type value: list[~azure.mgmt.datafactory.models.DataFlowResource] :param next_link: The link to the next page of results, if any remaining results exist. :type next_link: str """ @@ -12092,46 +12624,6 @@ def __init__( self.dataset_parameters = dataset_parameters -class SubResource(msrest.serialization.Model): - """Azure Data Factory nested resource, which belongs to a factory. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. - :vartype type: str - :ivar etag: Etag identifies change in the resource. - :vartype etag: str - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(SubResource, self).__init__(**kwargs) - self.id = None - self.name = None - self.type = None - self.etag = None - - class DataFlowResource(SubResource): """Data flow resource type. @@ -12148,7 +12640,7 @@ class DataFlowResource(SubResource): :ivar etag: Etag identifies change in the resource. :vartype etag: str :param properties: Required. Data flow properties. 
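# --- Illustrative sketch (hedged) ---------------------------------------------
# Issuing one of the three documented debug commands against an open data flow
# debug session. The DataFlowDebugCommandPayload fields (stream_name,
# row_limits) are assumptions; that model's definition is not shown in this
# hunk.
from azure.mgmt.datafactory.models import (
    DataFlowDebugCommandPayload,
    DataFlowDebugCommandRequest,
)

debug_request = DataFlowDebugCommandRequest(
    session_id="<debug-session-guid>",   # hypothetical placeholder
    command="executePreviewQuery",       # or executeStatisticsQuery / executeExpressionQuery
    command_payload=DataFlowDebugCommandPayload(stream_name="source1", row_limits=100),
)
# ------------------------------------------------------------------------------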
- :type properties: ~data_factory_management_client.models.DataFlow + :type properties: ~azure.mgmt.datafactory.models.DataFlow """ _validation = { @@ -12219,11 +12711,11 @@ class DataFlowSink(Transformation): :param description: Transformation description. :type description: str :param dataset: Dataset reference. - :type dataset: ~data_factory_management_client.models.DatasetReference + :type dataset: ~azure.mgmt.datafactory.models.DatasetReference :param linked_service: Linked service reference. - :type linked_service: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference :param schema_linked_service: Schema linked service reference. - :type schema_linked_service: ~data_factory_management_client.models.LinkedServiceReference + :type schema_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference """ _validation = { @@ -12264,11 +12756,11 @@ class DataFlowSource(Transformation): :param description: Transformation description. :type description: str :param dataset: Dataset reference. - :type dataset: ~data_factory_management_client.models.DatasetReference + :type dataset: ~azure.mgmt.datafactory.models.DatasetReference :param linked_service: Linked service reference. - :type linked_service: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference :param schema_linked_service: Schema linked service reference. - :type schema_linked_service: ~data_factory_management_client.models.LinkedServiceReference + :type schema_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference """ _validation = { @@ -12335,7 +12827,7 @@ class DataFlowStagingInfo(msrest.serialization.Model): """Staging info for execute data flow activity. :param linked_service: Staging linked service reference. - :type linked_service: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference :param folder_path: Folder path for staging blob. Type: string (or Expression with resultType string). :type folder_path: object @@ -12373,18 +12865,18 @@ class DataLakeAnalyticsUsqlActivity(ExecutionActivity): :param description: Activity description. :type description: str :param depends_on: Activity depends on condition. - :type depends_on: list[~data_factory_management_client.models.ActivityDependency] + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. - :type user_properties: list[~data_factory_management_client.models.UserProperty] + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] :param linked_service_name: Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param policy: Activity policy. - :type policy: ~data_factory_management_client.models.ActivityPolicy + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy :param script_path: Required. Case-sensitive path to folder that contains the U-SQL script. Type: string (or Expression with resultType string). :type script_path: object :param script_linked_service: Required. Script linked service reference. 
- :type script_linked_service: ~data_factory_management_client.models.LinkedServiceReference + :type script_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference :param degree_of_parallelism: The maximum number of nodes simultaneously used to run the job. Default value is 1. Type: integer (or Expression with resultType integer), minimum: 1. :type degree_of_parallelism: object @@ -12468,8 +12960,9 @@ class DatasetCompression(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset compression.Constant filled by server. - :type type: str + :param type: Required. Type of dataset compression. Type: string (or Expression with resultType + string).Constant filled by server. + :type type: object """ _validation = { @@ -12478,7 +12971,7 @@ class DatasetCompression(msrest.serialization.Model): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'object'}, } _subtype_map = { @@ -12504,8 +12997,9 @@ class DatasetBZip2Compression(DatasetCompression): :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset compression.Constant filled by server. - :type type: str + :param type: Required. Type of dataset compression. Type: string (or Expression with resultType + string).Constant filled by server. + :type type: object """ _validation = { @@ -12514,7 +13008,7 @@ class DatasetBZip2Compression(DatasetCompression): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'object'}, } def __init__( @@ -12561,7 +13055,7 @@ class DatasetDebugResource(SubResourceDebugResource): :param name: The resource name. :type name: str :param properties: Required. Dataset properties. - :type properties: ~data_factory_management_client.models.Dataset + :type properties: ~azure.mgmt.datafactory.models.Dataset """ _validation = { @@ -12592,10 +13086,11 @@ class DatasetDeflateCompression(DatasetCompression): :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset compression.Constant filled by server. - :type type: str - :param level: The Deflate compression level. Possible values include: "Optimal", "Fastest". - :type level: str or ~data_factory_management_client.models.DatasetCompressionLevel + :param type: Required. Type of dataset compression. Type: string (or Expression with resultType + string).Constant filled by server. + :type type: object + :param level: The Deflate compression level. 
+ :type level: object """ _validation = { @@ -12604,15 +13099,15 @@ class DatasetDeflateCompression(DatasetCompression): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'level': {'key': 'level', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'object'}, + 'level': {'key': 'level', 'type': 'object'}, } def __init__( self, *, additional_properties: Optional[Dict[str, object]] = None, - level: Optional[Union[str, "DatasetCompressionLevel"]] = None, + level: Optional[object] = None, **kwargs ): super(DatasetDeflateCompression, self).__init__(additional_properties=additional_properties, **kwargs) @@ -12649,10 +13144,11 @@ class DatasetGZipCompression(DatasetCompression): :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset compression.Constant filled by server. - :type type: str - :param level: The GZip compression level. Possible values include: "Optimal", "Fastest". - :type level: str or ~data_factory_management_client.models.DatasetCompressionLevel + :param type: Required. Type of dataset compression. Type: string (or Expression with resultType + string).Constant filled by server. + :type type: object + :param level: The GZip compression level. + :type level: object """ _validation = { @@ -12661,15 +13157,15 @@ class DatasetGZipCompression(DatasetCompression): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'level': {'key': 'level', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'object'}, + 'level': {'key': 'level', 'type': 'object'}, } def __init__( self, *, additional_properties: Optional[Dict[str, object]] = None, - level: Optional[Union[str, "DatasetCompressionLevel"]] = None, + level: Optional[object] = None, **kwargs ): super(DatasetGZipCompression, self).__init__(additional_properties=additional_properties, **kwargs) @@ -12683,7 +13179,7 @@ class DatasetListResponse(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. :param value: Required. List of datasets. - :type value: list[~data_factory_management_client.models.DatasetResource] + :type value: list[~azure.mgmt.datafactory.models.DatasetResource] :param next_link: The link to the next page of results, if any remaining results exist. :type next_link: str """ @@ -12765,7 +13261,7 @@ class DatasetResource(SubResource): :ivar etag: Etag identifies change in the resource. :vartype etag: str :param properties: Required. Dataset properties. - :type properties: ~data_factory_management_client.models.Dataset + :type properties: ~azure.mgmt.datafactory.models.Dataset """ _validation = { @@ -12834,8 +13330,9 @@ class DatasetTarCompression(DatasetCompression): :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset compression.Constant filled by server. - :type type: str + :param type: Required. Type of dataset compression. Type: string (or Expression with resultType + string).Constant filled by server. 
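# --- Illustrative sketch (hedged) ---------------------------------------------
# What loosening compression `type`/`level` from str to object enables: the
# level can now be either a literal or a parameterized ADF expression. The
# {"type": "Expression", "value": ...} envelope is the standard ADF expression
# payload, assumed here rather than defined in this hunk.
from azure.mgmt.datafactory.models import DatasetGZipCompression

static_level = DatasetGZipCompression(level="Optimal")
dynamic_level = DatasetGZipCompression(
    level={"type": "Expression", "value": "@pipeline().parameters.compressionLevel"}
)
# ------------------------------------------------------------------------------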
+ :type type: object """ _validation = { @@ -12844,7 +13341,7 @@ class DatasetTarCompression(DatasetCompression): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'object'}, } def __init__( @@ -12865,10 +13362,11 @@ class DatasetTarGZipCompression(DatasetCompression): :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset compression.Constant filled by server. - :type type: str - :param level: The TarGZip compression level. Possible values include: "Optimal", "Fastest". - :type level: str or ~data_factory_management_client.models.DatasetCompressionLevel + :param type: Required. Type of dataset compression. Type: string (or Expression with resultType + string).Constant filled by server. + :type type: object + :param level: The TarGZip compression level. + :type level: object """ _validation = { @@ -12877,15 +13375,15 @@ class DatasetTarGZipCompression(DatasetCompression): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'level': {'key': 'level', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'object'}, + 'level': {'key': 'level', 'type': 'object'}, } def __init__( self, *, additional_properties: Optional[Dict[str, object]] = None, - level: Optional[Union[str, "DatasetCompressionLevel"]] = None, + level: Optional[object] = None, **kwargs ): super(DatasetTarGZipCompression, self).__init__(additional_properties=additional_properties, **kwargs) @@ -12901,10 +13399,11 @@ class DatasetZipDeflateCompression(DatasetCompression): :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset compression.Constant filled by server. - :type type: str - :param level: The ZipDeflate compression level. Possible values include: "Optimal", "Fastest". - :type level: str or ~data_factory_management_client.models.DatasetCompressionLevel + :param type: Required. Type of dataset compression. Type: string (or Expression with resultType + string).Constant filled by server. + :type type: object + :param level: The ZipDeflate compression level. + :type level: object """ _validation = { @@ -12913,15 +13412,15 @@ class DatasetZipDeflateCompression(DatasetCompression): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'level': {'key': 'level', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'object'}, + 'level': {'key': 'level', 'type': 'object'}, } def __init__( self, *, additional_properties: Optional[Dict[str, object]] = None, - level: Optional[Union[str, "DatasetCompressionLevel"]] = None, + level: Optional[object] = None, **kwargs ): super(DatasetZipDeflateCompression, self).__init__(additional_properties=additional_properties, **kwargs) @@ -12940,11 +13439,11 @@ class Db2LinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. 
:type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param connection_string: The connection string. It is mutually exclusive with server, @@ -12959,12 +13458,12 @@ class Db2LinkedService(LinkedService): :type database: object :param authentication_type: AuthenticationType to be used for connection. It is mutually exclusive with connectionString property. Possible values include: "Basic". - :type authentication_type: str or ~data_factory_management_client.models.Db2AuthenticationType + :type authentication_type: str or ~azure.mgmt.datafactory.models.Db2AuthenticationType :param username: Username for authentication. It is mutually exclusive with connectionString property. Type: string (or Expression with resultType string). :type username: object :param password: Password for authentication. - :type password: ~data_factory_management_client.models.SecretBase + :type password: ~azure.mgmt.datafactory.models.SecretBase :param package_collection: Under where packages are created when querying database. It is mutually exclusive with connectionString property. Type: string (or Expression with resultType string). @@ -13051,12 +13550,15 @@ class Db2Source(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: Database query. Type: string (or Expression with resultType string). 
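# --- Illustrative sketch (hedged) ---------------------------------------------
# A Db2LinkedService built from the discrete properties, which the docstring
# above says are mutually exclusive with `connection_string`. The constructor
# call is assumed (not shown in this hunk), and SecureString is assumed as the
# concrete SecretBase carrying the password.
from azure.mgmt.datafactory.models import Db2LinkedService, SecureString

db2_discrete = Db2LinkedService(
    server="db2prod.contoso.com",
    database="SAMPLE",
    authentication_type="Basic",
    username="db2admin",
    password=SecureString(value="<password>"),
)

# Alternatively, supply a single connection string and omit the fields above:
db2_connection_string = Db2LinkedService(
    connection_string="Server=db2prod.contoso.com;Database=SAMPLE"
)
# ------------------------------------------------------------------------------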
:type query: object """ @@ -13071,8 +13573,9 @@ class Db2Source(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -13083,12 +13586,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, query: Optional[object] = None, **kwargs ): - super(Db2Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(Db2Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'Db2Source' # type: str self.query = query @@ -13112,14 +13616,14 @@ class Db2TableDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: This property will be retired. Please consider using schema + table properties instead. :type table_name: object @@ -13188,13 +13692,13 @@ class DeleteActivity(ExecutionActivity): :param description: Activity description. :type description: str :param depends_on: Activity depends on condition. - :type depends_on: list[~data_factory_management_client.models.ActivityDependency] + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. - :type user_properties: list[~data_factory_management_client.models.UserProperty] + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] :param linked_service_name: Linked service reference. 
- :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param policy: Activity policy. - :type policy: ~data_factory_management_client.models.ActivityPolicy + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy :param recursive: If true, files or sub-folders under current folder path will be deleted recursively. Default is false. Type: boolean (or Expression with resultType boolean). :type recursive: object @@ -13206,11 +13710,11 @@ class DeleteActivity(ExecutionActivity): :type enable_logging: object :param log_storage_settings: Log storage settings customer need to provide when enableLogging is true. - :type log_storage_settings: ~data_factory_management_client.models.LogStorageSettings + :type log_storage_settings: ~azure.mgmt.datafactory.models.LogStorageSettings :param dataset: Required. Delete activity dataset reference. - :type dataset: ~data_factory_management_client.models.DatasetReference + :type dataset: ~azure.mgmt.datafactory.models.DatasetReference :param store_settings: Delete activity store settings. - :type store_settings: ~data_factory_management_client.models.StoreReadSettings + :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings """ _validation = { @@ -13305,16 +13809,16 @@ class DelimitedTextDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param location: The location of the delimited text storage. - :type location: ~data_factory_management_client.models.DatasetLocation + :type location: ~azure.mgmt.datafactory.models.DatasetLocation :param column_delimiter: The column delimiter. Type: string (or Expression with resultType string). :type column_delimiter: object @@ -13326,12 +13830,11 @@ class DelimitedTextDataset(Dataset): https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with resultType string). :type encoding_name: object - :param compression_codec: Possible values include: "none", "gzip", "snappy", "lzo", "bzip2", - "deflate", "zipDeflate", "lz4", "tar", "tarGZip". - :type compression_codec: str or ~data_factory_management_client.models.CompressionCodec - :param compression_level: The data compression method used for DelimitedText. Possible values - include: "Optimal", "Fastest". - :type compression_level: str or ~data_factory_management_client.models.DatasetCompressionLevel + :param compression_codec: The data compressionCodec. Type: string (or Expression with + resultType string). + :type compression_codec: object + :param compression_level: The data compression method used for DelimitedText. 
+ :type compression_level: object :param quote_char: The quote character. Type: string (or Expression with resultType string). :type quote_char: object :param escape_char: The escape character. Type: string (or Expression with resultType string). @@ -13363,8 +13866,8 @@ class DelimitedTextDataset(Dataset): 'column_delimiter': {'key': 'typeProperties.columnDelimiter', 'type': 'object'}, 'row_delimiter': {'key': 'typeProperties.rowDelimiter', 'type': 'object'}, 'encoding_name': {'key': 'typeProperties.encodingName', 'type': 'object'}, - 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'str'}, - 'compression_level': {'key': 'typeProperties.compressionLevel', 'type': 'str'}, + 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'object'}, + 'compression_level': {'key': 'typeProperties.compressionLevel', 'type': 'object'}, 'quote_char': {'key': 'typeProperties.quoteChar', 'type': 'object'}, 'escape_char': {'key': 'typeProperties.escapeChar', 'type': 'object'}, 'first_row_as_header': {'key': 'typeProperties.firstRowAsHeader', 'type': 'object'}, @@ -13386,8 +13889,8 @@ def __init__( column_delimiter: Optional[object] = None, row_delimiter: Optional[object] = None, encoding_name: Optional[object] = None, - compression_codec: Optional[Union[str, "CompressionCodec"]] = None, - compression_level: Optional[Union[str, "DatasetCompressionLevel"]] = None, + compression_codec: Optional[object] = None, + compression_level: Optional[object] = None, quote_char: Optional[object] = None, escape_char: Optional[object] = None, first_row_as_header: Optional[object] = None, @@ -13422,7 +13925,7 @@ class DelimitedTextReadSettings(FormatReadSettings): input files. Type: integer (or Expression with resultType integer). :type skip_line_count: object :param compression_properties: Compression settings. - :type compression_properties: ~data_factory_management_client.models.CompressionReadSettings + :type compression_properties: ~azure.mgmt.datafactory.models.CompressionReadSettings """ _validation = { @@ -13475,10 +13978,13 @@ class DelimitedTextSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param store_settings: DelimitedText store settings. - :type store_settings: ~data_factory_management_client.models.StoreWriteSettings + :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings :param format_settings: DelimitedText format settings. 
- :type format_settings: ~data_factory_management_client.models.DelimitedTextWriteSettings + :type format_settings: ~azure.mgmt.datafactory.models.DelimitedTextWriteSettings """ _validation = { @@ -13493,6 +13999,7 @@ class DelimitedTextSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, 'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextWriteSettings'}, } @@ -13506,11 +14013,12 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, store_settings: Optional["StoreWriteSettings"] = None, format_settings: Optional["DelimitedTextWriteSettings"] = None, **kwargs ): - super(DelimitedTextSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(DelimitedTextSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'DelimitedTextSink' # type: str self.store_settings = store_settings self.format_settings = format_settings @@ -13535,13 +14043,16 @@ class DelimitedTextSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param store_settings: DelimitedText store settings. - :type store_settings: ~data_factory_management_client.models.StoreReadSettings + :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings :param format_settings: DelimitedText format settings. - :type format_settings: ~data_factory_management_client.models.DelimitedTextReadSettings + :type format_settings: ~azure.mgmt.datafactory.models.DelimitedTextReadSettings :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
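# --- Illustrative sketch (hedged) ---------------------------------------------
# A DelimitedTextSink with the new `disable_metrics_collection` flag. Only the
# sink itself is defined in this hunk; AzureBlobStorageWriteSettings (one
# StoreWriteSettings subtype) and the `file_extension` parameter on
# DelimitedTextWriteSettings are assumed from the wider model set.
from azure.mgmt.datafactory.models import (
    AzureBlobStorageWriteSettings,
    DelimitedTextSink,
    DelimitedTextWriteSettings,
)

csv_sink = DelimitedTextSink(
    store_settings=AzureBlobStorageWriteSettings(max_concurrent_connections=4),
    format_settings=DelimitedTextWriteSettings(file_extension=".csv"),
    disable_metrics_collection=False,  # keep data store metrics; set True to opt out
)
# ------------------------------------------------------------------------------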
+ :type additional_columns: object """ _validation = { @@ -13554,9 +14065,10 @@ class DelimitedTextSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, 'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextReadSettings'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -13566,12 +14078,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, store_settings: Optional["StoreReadSettings"] = None, format_settings: Optional["DelimitedTextReadSettings"] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, **kwargs ): - super(DelimitedTextSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(DelimitedTextSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'DelimitedTextSource' # type: str self.store_settings = store_settings self.format_settings = format_settings @@ -13728,14 +14241,14 @@ class DocumentDbCollectionDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param collection_name: Required. Document Database collection name. Type: string (or Expression with resultType string). :type collection_name: object @@ -13804,6 +14317,9 @@ class DocumentDbCollectionSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param nesting_separator: Nested properties separator. Default is . (dot). 
Type: string (or Expression with resultType string). :type nesting_separator: object @@ -13824,6 +14340,7 @@ class DocumentDbCollectionSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, } @@ -13837,11 +14354,12 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, nesting_separator: Optional[object] = None, write_behavior: Optional[object] = None, **kwargs ): - super(DocumentDbCollectionSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(DocumentDbCollectionSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'DocumentDbCollectionSink' # type: str self.nesting_separator = nesting_separator self.write_behavior = write_behavior @@ -13866,6 +14384,9 @@ class DocumentDbCollectionSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query: Documents query. Type: string (or Expression with resultType string). :type query: object :param nesting_separator: Nested properties separator. Type: string (or Expression with @@ -13875,8 +14396,8 @@ class DocumentDbCollectionSource(CopySource): pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
+ :type additional_columns: object """ _validation = { @@ -13889,10 +14410,11 @@ class DocumentDbCollectionSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -13902,13 +14424,14 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query: Optional[object] = None, nesting_separator: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, **kwargs ): - super(DocumentDbCollectionSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(DocumentDbCollectionSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'DocumentDbCollectionSource' # type: str self.query = query self.nesting_separator = nesting_separator @@ -13927,18 +14450,18 @@ class DrillLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param connection_string: An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. :type connection_string: object :param pwd: The Azure key vault secret reference of password in connection string. - :type pwd: ~data_factory_management_client.models.AzureKeyVaultSecretReference + :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -14000,12 +14523,15 @@ class DrillSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). 
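# --- Illustrative sketch (hedged) ---------------------------------------------
# A DocumentDbCollectionSource per the constructor completed above: a documents
# query, a nesting separator for flattened property names, and the fields this
# patch adds. The additional-column dict shape is an assumption.
from azure.mgmt.datafactory.models import DocumentDbCollectionSource

docdb_source = DocumentDbCollectionSource(
    query="SELECT c.id, c.profile FROM c",
    nesting_separator=".",
    query_timeout="02:00:00",
    disable_metrics_collection=True,
    additional_columns=[{"name": "run_id", "value": "@pipeline().RunId"}],
)
# ------------------------------------------------------------------------------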
:type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -14021,8 +14547,9 @@ class DrillSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -14033,12 +14560,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, query: Optional[object] = None, **kwargs ): - super(DrillSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(DrillSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'DrillSource' # type: str self.query = query @@ -14062,14 +14590,14 @@ class DrillTableDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. 
- :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: This property will be retired. Please consider using schema + table properties instead. :type table_name: object @@ -14157,7 +14685,7 @@ class DwCopyCommandSettings(msrest.serialization.Model): default values in the property overwrite the DEFAULT constraint set in the DB, and identity column cannot have a default value. Type: array of objects (or Expression with resultType array of objects). - :type default_values: list[~data_factory_management_client.models.DwCopyCommandDefaultValue] + :type default_values: list[~azure.mgmt.datafactory.models.DwCopyCommandDefaultValue] :param additional_options: Additional options directly passed to SQL DW in Copy Command. Type: key value pairs (value should be string type) (or Expression with resultType object). Example: "additionalOptions": { "MAXERRORS": "1000", "DATEFORMAT": "'ymd'" }. @@ -14192,11 +14720,11 @@ class DynamicsAxLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param url: Required. The Dynamics AX (or Dynamics 365 Finance and Operations) instance OData @@ -14208,7 +14736,7 @@ class DynamicsAxLinkedService(LinkedService): :param service_principal_key: Required. Specify the application's key. Mark this field as a SecureString to store it securely in Data Factory, or reference a secret stored in Azure Key Vault. Type: string (or Expression with resultType string). - :type service_principal_key: ~data_factory_management_client.models.SecretBase + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :param tenant: Required. Specify the tenant information (domain name or tenant ID) under which your application resides. Retrieve it by hovering the mouse in the top-right corner of the Azure portal. Type: string (or Expression with resultType string). @@ -14291,14 +14819,14 @@ class DynamicsAxResourceDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. 
- :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param path: Required. The path of the Dynamics AX OData entity. Type: string (or Expression with resultType string). :type path: object @@ -14361,12 +14889,15 @@ class DynamicsAxSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -14387,8 +14918,9 @@ class DynamicsAxSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, } @@ -14400,13 +14932,14 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, query: Optional[object] = None, http_request_timeout: Optional[object] = None, **kwargs ): - super(DynamicsAxSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(DynamicsAxSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'DynamicsAXSource' # type: str self.query = query self.http_request_timeout = http_request_timeout @@ -14431,14 +14964,14 @@ class DynamicsCrmEntityDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. 
:type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param entity_name: The logical name of the entity. Type: string (or Expression with resultType string). :type entity_name: object @@ -14492,18 +15025,17 @@ class DynamicsCrmLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param deployment_type: Required. The deployment type of the Dynamics CRM instance. 'Online' for Dynamics CRM Online and 'OnPremisesWithIfd' for Dynamics CRM on-premises with Ifd. Type: - string (or Expression with resultType string). Possible values include: "Online", - "OnPremisesWithIfd". - :type deployment_type: str or ~data_factory_management_client.models.DynamicsDeploymentType + string (or Expression with resultType string). + :type deployment_type: object :param host_name: The host name of the on-premises Dynamics CRM server. The property is required for on-prem and not allowed for online. Type: string (or Expression with resultType string). @@ -14522,30 +15054,26 @@ class DynamicsCrmLinkedService(LinkedService): :param authentication_type: Required. The authentication type to connect to Dynamics CRM server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario, 'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: string (or - Expression with resultType string). Possible values include: "Office365", "Ifd", - "AADServicePrincipal". - :type authentication_type: str or - ~data_factory_management_client.models.DynamicsAuthenticationType + Expression with resultType string). + :type authentication_type: object :param username: User name to access the Dynamics CRM instance. Type: string (or Expression with resultType string). :type username: object :param password: Password to access the Dynamics CRM instance. - :type password: ~data_factory_management_client.models.SecretBase + :type password: ~azure.mgmt.datafactory.models.SecretBase :param service_principal_id: The client ID of the application in Azure Active Directory used for Server-To-Server authentication. 
Type: string (or Expression with resultType string). :type service_principal_id: object :param service_principal_credential_type: The service principal credential type to use in Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' - for certificate. Type: string (or Expression with resultType string). Possible values include: - "ServicePrincipalKey", "ServicePrincipalCert". - :type service_principal_credential_type: str or - ~data_factory_management_client.models.DynamicsServicePrincipalCredentialType + for certificate. Type: string (or Expression with resultType string). + :type service_principal_credential_type: object :param service_principal_credential: The credential of the service principal object in Azure Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only be AzureKeyVaultSecretReference. - :type service_principal_credential: ~data_factory_management_client.models.SecretBase + :type service_principal_credential: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -14565,16 +15093,16 @@ class DynamicsCrmLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'}, + 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'object'}, 'host_name': {'key': 'typeProperties.hostName', 'type': 'object'}, 'port': {'key': 'typeProperties.port', 'type': 'object'}, 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'}, 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, 'username': {'key': 'typeProperties.username', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'str'}, + 'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'object'}, 'service_principal_credential': {'key': 'typeProperties.servicePrincipalCredential', 'type': 'SecretBase'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -14582,8 +15110,8 @@ class DynamicsCrmLinkedService(LinkedService): def __init__( self, *, - deployment_type: Union[str, "DynamicsDeploymentType"], - authentication_type: Union[str, "DynamicsAuthenticationType"], + deployment_type: object, + authentication_type: object, additional_properties: Optional[Dict[str, object]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, @@ -14596,7 +15124,7 @@ def __init__( username: Optional[object] = None, password: Optional["SecretBase"] = None, service_principal_id: Optional[object] = None, - 
service_principal_credential_type: Optional[Union[str, "DynamicsServicePrincipalCredentialType"]] = None, + service_principal_credential_type: Optional[object] = None, service_principal_credential: Optional["SecretBase"] = None, encrypted_credential: Optional[object] = None, **kwargs @@ -14642,9 +15170,12 @@ class DynamicsCrmSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param write_behavior: Required. The write behavior for the operation. Possible values include: "Upsert". - :type write_behavior: str or ~data_factory_management_client.models.DynamicsSinkWriteBehavior + :type write_behavior: str or ~azure.mgmt.datafactory.models.DynamicsSinkWriteBehavior :param ignore_null_values: The flag indicating whether to ignore null values from input dataset (except key fields) during write operation. Default is false. Type: boolean (or Expression with resultType boolean). @@ -14667,6 +15198,7 @@ class DynamicsCrmSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, 'alternate_key_name': {'key': 'alternateKeyName', 'type': 'object'}, @@ -14682,11 +15214,12 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, ignore_null_values: Optional[object] = None, alternate_key_name: Optional[object] = None, **kwargs ): - super(DynamicsCrmSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(DynamicsCrmSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'DynamicsCrmSink' # type: str self.write_behavior = write_behavior self.ignore_null_values = ignore_null_values @@ -14712,12 +15245,15 @@ class DynamicsCrmSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query: FetchXML is a proprietary query language that is used in Microsoft Dynamics CRM (online & on-premises). Type: string (or Expression with resultType string). 
:type query: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object """ _validation = { @@ -14730,8 +15266,9 @@ class DynamicsCrmSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -14741,11 +15278,12 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, **kwargs ): - super(DynamicsCrmSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(DynamicsCrmSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'DynamicsCrmSource' # type: str self.query = query self.additional_columns = additional_columns @@ -14770,14 +15308,14 @@ class DynamicsEntityDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param entity_name: The logical name of the entity. Type: string (or Expression with resultType string). :type entity_name: object @@ -14831,17 +15369,17 @@ class DynamicsLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. 
:type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param deployment_type: Required. The deployment type of the Dynamics instance. 'Online' for Dynamics Online and 'OnPremisesWithIfd' for Dynamics on-premises with Ifd. Type: string (or - Expression with resultType string). Possible values include: "Online", "OnPremisesWithIfd". - :type deployment_type: str or ~data_factory_management_client.models.DynamicsDeploymentType + Expression with resultType string). + :type deployment_type: object :param host_name: The host name of the on-premises Dynamics server. The property is required for on-prem and not allowed for online. Type: string (or Expression with resultType string). :type host_name: object @@ -14859,29 +15397,26 @@ class DynamicsLinkedService(LinkedService): :param authentication_type: Required. The authentication type to connect to Dynamics server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario, 'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: string (or Expression with - resultType string). Possible values include: "Office365", "Ifd", "AADServicePrincipal". - :type authentication_type: str or - ~data_factory_management_client.models.DynamicsAuthenticationType + resultType string). + :type authentication_type: object :param username: User name to access the Dynamics instance. Type: string (or Expression with resultType string). :type username: object :param password: Password to access the Dynamics instance. - :type password: ~data_factory_management_client.models.SecretBase + :type password: ~azure.mgmt.datafactory.models.SecretBase :param service_principal_id: The client ID of the application in Azure Active Directory used for Server-To-Server authentication. Type: string (or Expression with resultType string). :type service_principal_id: object :param service_principal_credential_type: The service principal credential type to use in Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' - for certificate. Type: string (or Expression with resultType string). Possible values include: - "ServicePrincipalKey", "ServicePrincipalCert". - :type service_principal_credential_type: str or - ~data_factory_management_client.models.DynamicsServicePrincipalCredentialType + for certificate. Type: string (or Expression with resultType string). + :type service_principal_credential_type: str :param service_principal_credential: The credential of the service principal object in Azure Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only be AzureKeyVaultSecretReference. - :type service_principal_credential: ~data_factory_management_client.models.SecretBase + :type service_principal_credential: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
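The docstring hunk above and the attribute-map hunk below relax deploymentType, authenticationType, and (for the CRM variant) servicePrincipalCredentialType from enum-backed strings to plain objects, so these properties can carry ADF expressions as well as literal values. A minimal sketch of what that permits, using only model names and parameters shown in this diff; the vendored import path and the expression-dict shape are assumptions, not part of the generated code:

# Hypothetical usage sketch -- not part of the generated diff.
from azext_datafactory.vendored_sdks.datafactory.models import (  # import path assumed
    DynamicsLinkedService,
    SecureString,  # stock SecretBase implementation in this SDK
)

# Literal strings still work exactly as they did under the enum typing:
ls = DynamicsLinkedService(
    deployment_type="Online",
    authentication_type="Office365",
    service_uri="https://contoso.crm.dynamics.com",
    username="user@contoso.com",
    password=SecureString(value="<password>"),
)

# Because the parameters are now typed as plain objects, an ADF expression
# (assumed here in its usual {"value": ..., "type": "Expression"} JSON shape)
# can be passed where previously only an enum literal validated:
parameterized = DynamicsLinkedService(
    deployment_type={"value": "@linkedService().deploymentType", "type": "Expression"},
    authentication_type="Office365",
)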
@@ -14901,12 +15436,12 @@ class DynamicsLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'}, + 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'object'}, 'host_name': {'key': 'typeProperties.hostName', 'type': 'object'}, 'port': {'key': 'typeProperties.port', 'type': 'object'}, 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'}, 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, 'username': {'key': 'typeProperties.username', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, @@ -14918,8 +15453,8 @@ class DynamicsLinkedService(LinkedService): def __init__( self, *, - deployment_type: Union[str, "DynamicsDeploymentType"], - authentication_type: Union[str, "DynamicsAuthenticationType"], + deployment_type: object, + authentication_type: object, additional_properties: Optional[Dict[str, object]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, @@ -14932,7 +15467,7 @@ def __init__( username: Optional[object] = None, password: Optional["SecretBase"] = None, service_principal_id: Optional[object] = None, - service_principal_credential_type: Optional[Union[str, "DynamicsServicePrincipalCredentialType"]] = None, + service_principal_credential_type: Optional[str] = None, service_principal_credential: Optional["SecretBase"] = None, encrypted_credential: Optional[object] = None, **kwargs @@ -14978,9 +15513,12 @@ class DynamicsSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param write_behavior: Required. The write behavior for the operation. Possible values include: "Upsert". - :type write_behavior: str or ~data_factory_management_client.models.DynamicsSinkWriteBehavior + :type write_behavior: str or ~azure.mgmt.datafactory.models.DynamicsSinkWriteBehavior :param ignore_null_values: The flag indicating whether ignore null values from input dataset (except key fields) during write operation. Default is false. Type: boolean (or Expression with resultType boolean). 
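Every CopySource/CopySink subclass touched by this diff gains the same optional disable_metrics_collection parameter, serialized under the disableMetricsCollection wire name shown in the attribute maps. A short sketch of the flag threaded through the Dynamics source/sink pair, assuming the extension's vendored import path; serialize() is the standard msrest Model helper:

# Hypothetical usage sketch -- not part of the generated diff.
from azext_datafactory.vendored_sdks.datafactory.models import (  # import path assumed
    DynamicsSink,
    DynamicsSource,
)

source = DynamicsSource(
    query="<fetch mapping='logical'>...</fetch>",  # FetchXML, per the docstring above
    disable_metrics_collection=True,
)
sink = DynamicsSink(
    write_behavior="Upsert",  # required; "Upsert" is the only documented value
    ignore_null_values=False,
    disable_metrics_collection=True,
)

# msrest maps the Python attribute to the REST wire name from the attribute map:
print(source.serialize())
# expected shape: {'type': 'DynamicsSource', 'disableMetricsCollection': True, 'query': ...}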
@@ -15003,6 +15541,7 @@ class DynamicsSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, 'alternate_key_name': {'key': 'alternateKeyName', 'type': 'object'}, @@ -15018,11 +15557,12 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, ignore_null_values: Optional[object] = None, alternate_key_name: Optional[object] = None, **kwargs ): - super(DynamicsSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(DynamicsSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'DynamicsSink' # type: str self.write_behavior = write_behavior self.ignore_null_values = ignore_null_values @@ -15048,12 +15588,15 @@ class DynamicsSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query: FetchXML is a proprietary query language that is used in Microsoft Dynamics (online & on-premises). Type: string (or Expression with resultType string). :type query: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
+ :type additional_columns: object """ _validation = { @@ -15066,8 +15609,9 @@ class DynamicsSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -15077,11 +15621,12 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, **kwargs ): - super(DynamicsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(DynamicsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'DynamicsSource' # type: str self.query = query self.additional_columns = additional_columns @@ -15098,11 +15643,11 @@ class EloquaLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param endpoint: Required. The endpoint of the Eloqua server. (i.e. eloqua.example.com). @@ -15111,7 +15656,7 @@ class EloquaLinkedService(LinkedService): sitename/username. (i.e. Eloqua/Alice). :type username: object :param password: The password corresponding to the user name. - :type password: ~data_factory_management_client.models.SecretBase + :type password: ~azure.mgmt.datafactory.models.SecretBase :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. :type use_encrypted_endpoints: object @@ -15197,14 +15742,14 @@ class EloquaObjectDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. 
- :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The table name. Type: string (or Expression with resultType string). :type table_name: object """ @@ -15265,12 +15810,15 @@ class EloquaSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -15286,8 +15834,9 @@ class EloquaSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -15298,12 +15847,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, query: Optional[object] = None, **kwargs ): - super(EloquaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(EloquaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'EloquaSource' # type: str self.query = query @@ 
-15323,7 +15873,7 @@ class EncryptionConfiguration(msrest.serialization.Model): :type key_version: str :param identity: User assigned identity to use to authenticate to customer's key vault. If not provided Managed Service Identity will be used. - :type identity: ~data_factory_management_client.models.CmkIdentityDefinition + :type identity: ~azure.mgmt.datafactory.models.CmkIdentityDefinition """ _validation = { @@ -15359,7 +15909,7 @@ class EntityReference(msrest.serialization.Model): :param type: The type of this referenced entity. Possible values include: "IntegrationRuntimeReference", "LinkedServiceReference". - :type type: str or ~data_factory_management_client.models.IntegrationRuntimeEntityReferenceType + :type type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeEntityReferenceType :param reference_name: The name of this referenced entity. :type reference_name: str """ @@ -15438,19 +15988,22 @@ class ExcelDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param location: The location of the excel storage. - :type location: ~data_factory_management_client.models.DatasetLocation - :param sheet_name: The sheet of excel file. Type: string (or Expression with resultType + :type location: ~azure.mgmt.datafactory.models.DatasetLocation + :param sheet_name: The sheet name of excel file. Type: string (or Expression with resultType string). :type sheet_name: object + :param sheet_index: The sheet index of excel file and default value is 0. Type: integer (or + Expression with resultType integer). + :type sheet_index: object :param range: The partial data of one sheet. Type: string (or Expression with resultType string). :type range: object @@ -15459,7 +16012,7 @@ class ExcelDataset(Dataset): false. Type: boolean (or Expression with resultType boolean). :type first_row_as_header: object :param compression: The data compression method used for the json dataset. - :type compression: ~data_factory_management_client.models.DatasetCompression + :type compression: ~azure.mgmt.datafactory.models.DatasetCompression :param null_value: The null value string. Type: string (or Expression with resultType string). 
:type null_value: object """ @@ -15481,6 +16034,7 @@ class ExcelDataset(Dataset): 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, 'sheet_name': {'key': 'typeProperties.sheetName', 'type': 'object'}, + 'sheet_index': {'key': 'typeProperties.sheetIndex', 'type': 'object'}, 'range': {'key': 'typeProperties.range', 'type': 'object'}, 'first_row_as_header': {'key': 'typeProperties.firstRowAsHeader', 'type': 'object'}, 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, @@ -15500,6 +16054,7 @@ def __init__( folder: Optional["DatasetFolder"] = None, location: Optional["DatasetLocation"] = None, sheet_name: Optional[object] = None, + sheet_index: Optional[object] = None, range: Optional[object] = None, first_row_as_header: Optional[object] = None, compression: Optional["DatasetCompression"] = None, @@ -15510,6 +16065,7 @@ def __init__( self.type = 'Excel' # type: str self.location = location self.sheet_name = sheet_name + self.sheet_index = sheet_index self.range = range self.first_row_as_header = first_row_as_header self.compression = compression @@ -15535,11 +16091,14 @@ class ExcelSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param store_settings: Excel store settings. - :type store_settings: ~data_factory_management_client.models.StoreReadSettings + :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
+ :type additional_columns: object """ _validation = { @@ -15552,8 +16111,9 @@ class ExcelSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -15563,11 +16123,12 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, store_settings: Optional["StoreReadSettings"] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, **kwargs ): - super(ExcelSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(ExcelSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'ExcelSource' # type: str self.store_settings = store_settings self.additional_columns = additional_columns @@ -15588,22 +16149,21 @@ class ExecuteDataFlowActivity(ExecutionActivity): :param description: Activity description. :type description: str :param depends_on: Activity depends on condition. - :type depends_on: list[~data_factory_management_client.models.ActivityDependency] + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. - :type user_properties: list[~data_factory_management_client.models.UserProperty] + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] :param linked_service_name: Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param policy: Activity policy. - :type policy: ~data_factory_management_client.models.ActivityPolicy + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy :param data_flow: Required. Data flow reference. - :type data_flow: ~data_factory_management_client.models.DataFlowReference + :type data_flow: ~azure.mgmt.datafactory.models.DataFlowReference :param staging: Staging info for execute data flow activity. - :type staging: ~data_factory_management_client.models.DataFlowStagingInfo + :type staging: ~azure.mgmt.datafactory.models.DataFlowStagingInfo :param integration_runtime: The integration runtime reference. - :type integration_runtime: ~data_factory_management_client.models.IntegrationRuntimeReference + :type integration_runtime: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param compute: Compute properties for data flow activity. 
- :type compute: - ~data_factory_management_client.models.ExecuteDataFlowActivityTypePropertiesCompute + :type compute: ~azure.mgmt.datafactory.models.ExecuteDataFlowActivityTypePropertiesCompute :param trace_level: Trace level setting used for data flow monitoring output. Supported values are: 'coarse', 'fine', and 'none'. Type: string (or Expression with resultType string). :type trace_level: object @@ -15714,11 +16274,11 @@ class ExecutePipelineActivity(Activity): :param description: Activity description. :type description: str :param depends_on: Activity depends on condition. - :type depends_on: list[~data_factory_management_client.models.ActivityDependency] + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. - :type user_properties: list[~data_factory_management_client.models.UserProperty] + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] :param pipeline: Required. Pipeline reference. - :type pipeline: ~data_factory_management_client.models.PipelineReference + :type pipeline: ~azure.mgmt.datafactory.models.PipelineReference :param parameters: Pipeline parameters. :type parameters: dict[str, object] :param wait_on_completion: Defines whether activity execution will wait for the dependent @@ -15779,15 +16339,15 @@ class ExecuteSsisPackageActivity(ExecutionActivity): :param description: Activity description. :type description: str :param depends_on: Activity depends on condition. - :type depends_on: list[~data_factory_management_client.models.ActivityDependency] + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. - :type user_properties: list[~data_factory_management_client.models.UserProperty] + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] :param linked_service_name: Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param policy: Activity policy. - :type policy: ~data_factory_management_client.models.ActivityPolicy + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy :param package_location: Required. SSIS package location. - :type package_location: ~data_factory_management_client.models.SsisPackageLocation + :type package_location: ~azure.mgmt.datafactory.models.SsisPackageLocation :param runtime: Specifies the runtime to execute SSIS package. The value should be "x86" or "x64". Type: string (or Expression with resultType string). :type runtime: object @@ -15798,15 +16358,13 @@ class ExecuteSsisPackageActivity(ExecutionActivity): Expression with resultType string). :type environment_path: object :param execution_credential: The package execution credential. - :type execution_credential: ~data_factory_management_client.models.SsisExecutionCredential + :type execution_credential: ~azure.mgmt.datafactory.models.SsisExecutionCredential :param connect_via: Required. The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param project_parameters: The project level parameters to execute the SSIS package. 
- :type project_parameters: dict[str, - ~data_factory_management_client.models.SsisExecutionParameter] + :type project_parameters: dict[str, ~azure.mgmt.datafactory.models.SsisExecutionParameter] :param package_parameters: The package level parameters to execute the SSIS package. - :type package_parameters: dict[str, - ~data_factory_management_client.models.SsisExecutionParameter] + :type package_parameters: dict[str, ~azure.mgmt.datafactory.models.SsisExecutionParameter] :param project_connection_managers: The project level connection managers to execute the SSIS package. :type project_connection_managers: dict[str, object] @@ -15814,10 +16372,9 @@ class ExecuteSsisPackageActivity(ExecutionActivity): package. :type package_connection_managers: dict[str, object] :param property_overrides: The property overrides to execute the SSIS package. - :type property_overrides: dict[str, - ~data_factory_management_client.models.SsisPropertyOverride] + :type property_overrides: dict[str, ~azure.mgmt.datafactory.models.SsisPropertyOverride] :param log_location: SSIS package execution log location. - :type log_location: ~data_factory_management_client.models.SsisLogLocation + :type log_location: ~azure.mgmt.datafactory.models.SsisLogLocation """ _validation = { @@ -15896,8 +16453,7 @@ class ExposureControlBatchRequest(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. :param exposure_control_requests: Required. List of exposure control features. - :type exposure_control_requests: - list[~data_factory_management_client.models.ExposureControlRequest] + :type exposure_control_requests: list[~azure.mgmt.datafactory.models.ExposureControlRequest] """ _validation = { @@ -15924,8 +16480,7 @@ class ExposureControlBatchResponse(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. :param exposure_control_responses: Required. List of exposure control feature values. - :type exposure_control_responses: - list[~data_factory_management_client.models.ExposureControlResponse] + :type exposure_control_responses: list[~azure.mgmt.datafactory.models.ExposureControlResponse] """ _validation = { @@ -16109,7 +16664,7 @@ class Factory(Resource): collection. :type additional_properties: dict[str, object] :param identity: Managed service identity of the factory. - :type identity: ~data_factory_management_client.models.FactoryIdentity + :type identity: ~azure.mgmt.datafactory.models.FactoryIdentity :ivar provisioning_state: Factory provisioning state, example Succeeded. :vartype provisioning_state: str :ivar create_time: Time the factory was created in ISO8601 format. @@ -16117,15 +16672,14 @@ class Factory(Resource): :ivar version: Version of the factory. :vartype version: str :param repo_configuration: Git repo information of the factory. - :type repo_configuration: ~data_factory_management_client.models.FactoryRepoConfiguration + :type repo_configuration: ~azure.mgmt.datafactory.models.FactoryRepoConfiguration :param global_parameters: List of parameters for factory. - :type global_parameters: dict[str, - ~data_factory_management_client.models.GlobalParameterSpecification] + :type global_parameters: dict[str, ~azure.mgmt.datafactory.models.GlobalParameterSpecification] :param encryption: Properties to enable Customer Managed Key for the factory. 
- :type encryption: ~data_factory_management_client.models.EncryptionConfiguration + :type encryption: ~azure.mgmt.datafactory.models.EncryptionConfiguration :param public_network_access: Whether or not public network access is allowed for the data factory. Possible values include: "Enabled", "Disabled". - :type public_network_access: str or ~data_factory_management_client.models.PublicNetworkAccess + :type public_network_access: str or ~azure.mgmt.datafactory.models.PublicNetworkAccess """ _validation = { @@ -16307,7 +16861,7 @@ class FactoryIdentity(msrest.serialization.Model): :param type: Required. The identity type. Possible values include: "SystemAssigned", "UserAssigned", "SystemAssigned,UserAssigned". - :type type: str or ~data_factory_management_client.models.FactoryIdentityType + :type type: str or ~azure.mgmt.datafactory.models.FactoryIdentityType :ivar principal_id: The principal id of the identity. :vartype principal_id: str :ivar tenant_id: The client tenant id of the identity. @@ -16349,7 +16903,7 @@ class FactoryListResponse(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. :param value: Required. List of factories. - :type value: list[~data_factory_management_client.models.Factory] + :type value: list[~azure.mgmt.datafactory.models.Factory] :param next_link: The link to the next page of results, if any remaining results exist. :type next_link: str """ @@ -16381,7 +16935,7 @@ class FactoryRepoUpdate(msrest.serialization.Model): :param factory_resource_id: The factory resource id. :type factory_resource_id: str :param repo_configuration: Git repo information of the factory. - :type repo_configuration: ~data_factory_management_client.models.FactoryRepoConfiguration + :type repo_configuration: ~azure.mgmt.datafactory.models.FactoryRepoConfiguration """ _attribute_map = { @@ -16407,7 +16961,7 @@ class FactoryUpdateParameters(msrest.serialization.Model): :param tags: A set of tags. The resource tags. :type tags: dict[str, str] :param identity: Managed service identity of the factory. - :type identity: ~data_factory_management_client.models.FactoryIdentity + :type identity: ~azure.mgmt.datafactory.models.FactoryIdentity """ _attribute_map = { @@ -16499,11 +17053,11 @@ class FileServerLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param host: Required. Host name of the server. Type: string (or Expression with resultType @@ -16513,7 +17067,7 @@ class FileServerLinkedService(LinkedService): string). :type user_id: object :param password: Password to logon the server. - :type password: ~data_factory_management_client.models.SecretBase + :type password: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. 
Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -16614,6 +17168,9 @@ class FileServerReadSettings(StoreReadSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). :type recursive: object @@ -16654,6 +17211,7 @@ class FileServerReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -16671,6 +17229,7 @@ def __init__( *, additional_properties: Optional[Dict[str, object]] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, recursive: Optional[object] = None, wildcard_folder_path: Optional[object] = None, wildcard_file_name: Optional[object] = None, @@ -16683,7 +17242,7 @@ def __init__( file_filter: Optional[object] = None, **kwargs ): - super(FileServerReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(FileServerReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'FileServerReadSettings' # type: str self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path @@ -16710,6 +17269,9 @@ class FileServerWriteSettings(StoreWriteSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param copy_behavior: The type of copy behavior for copy sink. 
:type copy_behavior: object """ @@ -16722,6 +17284,7 @@ class FileServerWriteSettings(StoreWriteSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, } @@ -16730,10 +17293,11 @@ def __init__( *, additional_properties: Optional[Dict[str, object]] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, copy_behavior: Optional[object] = None, **kwargs ): - super(FileServerWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) + super(FileServerWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, copy_behavior=copy_behavior, **kwargs) self.type = 'FileServerWriteSettings' # type: str @@ -16756,14 +17320,14 @@ class FileShareDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param folder_path: The path of the on-premises file system. Type: string (or Expression with resultType string). :type folder_path: object @@ -16777,12 +17341,12 @@ class FileShareDataset(Dataset): with resultType string). :type modified_datetime_end: object :param format: The format of the files. - :type format: ~data_factory_management_client.models.DatasetStorageFormat + :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat :param file_filter: Specify a filter to be used to select a subset of files in the folderPath rather than all files. Type: string (or Expression with resultType string). :type file_filter: object :param compression: The data compression method used for the file system. - :type compression: ~data_factory_management_client.models.DatasetCompression + :type compression: ~azure.mgmt.datafactory.models.DatasetCompression """ _validation = { @@ -16865,6 +17429,9 @@ class FileSystemSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param copy_behavior: The type of copy behavior for copy sink. 
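The new disableMetricsCollection property is threaded through every read/write settings class the same way: one attribute-map entry, one keyword argument, and a pass-through in the super().__init__ call. A minimal usage sketch, assuming the public azure.mgmt.datafactory import path and hypothetical values:

from azure.mgmt.datafactory.models import FileServerReadSettings

# Typed as object because, like most ADF properties, the value may be an
# Expression instead of a literal boolean.
read_settings = FileServerReadSettings(
    recursive=True,
    wildcard_file_name="*.csv",
    disable_metrics_collection=True,
)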
:type copy_behavior: object """ @@ -16881,6 +17448,7 @@ class FileSystemSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, } @@ -16893,10 +17461,11 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, copy_behavior: Optional[object] = None, **kwargs ): - super(FileSystemSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(FileSystemSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'FileSystemSink' # type: str self.copy_behavior = copy_behavior @@ -16920,12 +17489,15 @@ class FileSystemSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). :type recursive: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
+ :type additional_columns: object """ _validation = { @@ -16938,8 +17510,9 @@ class FileSystemSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -16949,11 +17522,12 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, recursive: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, **kwargs ): - super(FileSystemSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(FileSystemSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'FileSystemSource' # type: str self.recursive = recursive self.additional_columns = additional_columns @@ -16974,13 +17548,13 @@ class FilterActivity(Activity): :param description: Activity description. :type description: str :param depends_on: Activity depends on condition. - :type depends_on: list[~data_factory_management_client.models.ActivityDependency] + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. - :type user_properties: list[~data_factory_management_client.models.UserProperty] + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] :param items: Required. Input array on which filter should be applied. - :type items: ~data_factory_management_client.models.Expression + :type items: ~azure.mgmt.datafactory.models.Expression :param condition: Required. Condition to be used for filtering the input. - :type condition: ~data_factory_management_client.models.Expression + :type condition: ~azure.mgmt.datafactory.models.Expression """ _validation = { @@ -17034,18 +17608,18 @@ class ForEachActivity(Activity): :param description: Activity description. :type description: str :param depends_on: Activity depends on condition. - :type depends_on: list[~data_factory_management_client.models.ActivityDependency] + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. - :type user_properties: list[~data_factory_management_client.models.UserProperty] + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] :param is_sequential: Should the loop be executed in sequence or in parallel (max 50). :type is_sequential: bool :param batch_count: Batch count to be used for controlling the number of parallel execution (when isSequential is set to false). :type batch_count: int :param items: Required. Collection to iterate. 
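additionalColumns is loosened from list[AdditionalColumns] to a bare object, so plain dicts or an Expression can be passed where only typed AdditionalColumns instances were accepted before. A minimal sketch, assuming the public import path and hypothetical column values:

from azure.mgmt.datafactory.models import FileSystemSource

source = FileSystemSource(
    recursive=True,
    # Serialized as-is under the 'additionalColumns' key.
    additional_columns=[{"name": "loadDate", "value": "@utcnow()"}],
)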
- :type items: ~data_factory_management_client.models.Expression + :type items: ~azure.mgmt.datafactory.models.Expression :param activities: Required. List of activities to execute . - :type activities: list[~data_factory_management_client.models.Activity] + :type activities: list[~azure.mgmt.datafactory.models.Activity] """ _validation = { @@ -17104,6 +17678,9 @@ class FtpReadSettings(StoreReadSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). :type recursive: object @@ -17137,6 +17714,7 @@ class FtpReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -17152,6 +17730,7 @@ def __init__( *, additional_properties: Optional[Dict[str, object]] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, recursive: Optional[object] = None, wildcard_folder_path: Optional[object] = None, wildcard_file_name: Optional[object] = None, @@ -17162,7 +17741,7 @@ def __init__( use_binary_transfer: Optional[bool] = None, **kwargs ): - super(FtpReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(FtpReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'FtpReadSettings' # type: str self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path @@ -17185,11 +17764,11 @@ class FtpServerLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param host: Required. Host name of the FTP server. Type: string (or Expression with resultType @@ -17200,12 +17779,12 @@ class FtpServerLinkedService(LinkedService): :type port: object :param authentication_type: The authentication type to be used to connect to the FTP server. Possible values include: "Basic", "Anonymous". 
- :type authentication_type: str or ~data_factory_management_client.models.FtpAuthenticationType + :type authentication_type: str or ~azure.mgmt.datafactory.models.FtpAuthenticationType :param user_name: Username to logon the FTP server. Type: string (or Expression with resultType string). :type user_name: object :param password: Password to logon the FTP server. - :type password: ~data_factory_management_client.models.SecretBase + :type password: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -17354,21 +17933,21 @@ class GetMetadataActivity(ExecutionActivity): :param description: Activity description. :type description: str :param depends_on: Activity depends on condition. - :type depends_on: list[~data_factory_management_client.models.ActivityDependency] + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. - :type user_properties: list[~data_factory_management_client.models.UserProperty] + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] :param linked_service_name: Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param policy: Activity policy. - :type policy: ~data_factory_management_client.models.ActivityPolicy + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy :param dataset: Required. GetMetadata activity dataset reference. - :type dataset: ~data_factory_management_client.models.DatasetReference + :type dataset: ~azure.mgmt.datafactory.models.DatasetReference :param field_list: Fields of metadata to get from dataset. :type field_list: list[object] :param store_settings: GetMetadata activity store settings. - :type store_settings: ~data_factory_management_client.models.StoreReadSettings + :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings :param format_settings: GetMetadata activity format settings. - :type format_settings: ~data_factory_management_client.models.FormatReadSettings + :type format_settings: ~azure.mgmt.datafactory.models.FormatReadSettings """ _validation = { @@ -17446,6 +18025,8 @@ class GitHubAccessTokenRequest(msrest.serialization.Model): :type git_hub_access_code: str :param git_hub_client_id: GitHub application client ID. :type git_hub_client_id: str + :param git_hub_client_secret: GitHub bring your own app client secret information. + :type git_hub_client_secret: ~azure.mgmt.datafactory.models.GitHubClientSecret :param git_hub_access_token_base_url: Required. GitHub access token base URL. 
:type git_hub_access_token_base_url: str """ @@ -17458,6 +18039,7 @@ class GitHubAccessTokenRequest(msrest.serialization.Model): _attribute_map = { 'git_hub_access_code': {'key': 'gitHubAccessCode', 'type': 'str'}, 'git_hub_client_id': {'key': 'gitHubClientId', 'type': 'str'}, + 'git_hub_client_secret': {'key': 'gitHubClientSecret', 'type': 'GitHubClientSecret'}, 'git_hub_access_token_base_url': {'key': 'gitHubAccessTokenBaseUrl', 'type': 'str'}, } @@ -17467,11 +18049,13 @@ def __init__( git_hub_access_code: str, git_hub_access_token_base_url: str, git_hub_client_id: Optional[str] = None, + git_hub_client_secret: Optional["GitHubClientSecret"] = None, **kwargs ): super(GitHubAccessTokenRequest, self).__init__(**kwargs) self.git_hub_access_code = git_hub_access_code self.git_hub_client_id = git_hub_client_id + self.git_hub_client_secret = git_hub_client_secret self.git_hub_access_token_base_url = git_hub_access_token_base_url @@ -17496,6 +18080,32 @@ def __init__( self.git_hub_access_token = git_hub_access_token +class GitHubClientSecret(msrest.serialization.Model): + """Client secret information for factory's bring your own app repository configuration. + + :param byoa_secret_akv_url: Bring your own app client secret AKV URL. + :type byoa_secret_akv_url: str + :param byoa_secret_name: Bring your own app client secret name in AKV. + :type byoa_secret_name: str + """ + + _attribute_map = { + 'byoa_secret_akv_url': {'key': 'byoaSecretAkvUrl', 'type': 'str'}, + 'byoa_secret_name': {'key': 'byoaSecretName', 'type': 'str'}, + } + + def __init__( + self, + *, + byoa_secret_akv_url: Optional[str] = None, + byoa_secret_name: Optional[str] = None, + **kwargs + ): + super(GitHubClientSecret, self).__init__(**kwargs) + self.byoa_secret_akv_url = byoa_secret_akv_url + self.byoa_secret_name = byoa_secret_name + + class GlobalParameterSpecification(msrest.serialization.Model): """Definition of a single parameter for an entity. @@ -17503,7 +18113,7 @@ class GlobalParameterSpecification(msrest.serialization.Model): :param type: Required. Global Parameter type. Possible values include: "Object", "String", "Int", "Float", "Bool", "Array". - :type type: str or ~data_factory_management_client.models.GlobalParameterType + :type type: str or ~azure.mgmt.datafactory.models.GlobalParameterType :param value: Required. Value of parameter. :type value: object """ @@ -17541,11 +18151,11 @@ class GoogleAdWordsLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param client_customer_id: Required. The Client customer ID of the AdWords account that you @@ -17553,21 +18163,21 @@ class GoogleAdWordsLinkedService(LinkedService): :type client_customer_id: object :param developer_token: Required. The developer token associated with the manager account that you use to grant access to the AdWords API. 
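The new GitHubClientSecret model lets GitHubAccessTokenRequest carry a bring-your-own-app client secret stored in Azure Key Vault. A minimal sketch, with hypothetical access code and vault values:

from azure.mgmt.datafactory.models import (
    GitHubAccessTokenRequest,
    GitHubClientSecret,
)

request = GitHubAccessTokenRequest(
    git_hub_access_code="<oauth-access-code>",
    git_hub_access_token_base_url="https://github.com",
    git_hub_client_id="<byoa-client-id>",
    git_hub_client_secret=GitHubClientSecret(
        byoa_secret_akv_url="https://myvault.vault.azure.net/",
        byoa_secret_name="github-byoa-secret",
    ),
)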
- :type developer_token: ~data_factory_management_client.models.SecretBase + :type developer_token: ~azure.mgmt.datafactory.models.SecretBase :param authentication_type: Required. The OAuth 2.0 authentication mechanism used for authentication. ServiceAuthentication can only be used on self-hosted IR. Possible values include: "ServiceAuthentication", "UserAuthentication". :type authentication_type: str or - ~data_factory_management_client.models.GoogleAdWordsAuthenticationType + ~azure.mgmt.datafactory.models.GoogleAdWordsAuthenticationType :param refresh_token: The refresh token obtained from Google for authorizing access to AdWords for UserAuthentication. - :type refresh_token: ~data_factory_management_client.models.SecretBase + :type refresh_token: ~azure.mgmt.datafactory.models.SecretBase :param client_id: The client id of the google application used to acquire the refresh token. Type: string (or Expression with resultType string). :type client_id: object :param client_secret: The client secret of the google application used to acquire the refresh token. - :type client_secret: ~data_factory_management_client.models.SecretBase + :type client_secret: ~azure.mgmt.datafactory.models.SecretBase :param email: The service account email ID that is used for ServiceAuthentication and can only be used on self-hosted IR. :type email: object @@ -17669,14 +18279,14 @@ class GoogleAdWordsObjectDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The table name. Type: string (or Expression with resultType string). :type table_name: object """ @@ -17737,12 +18347,15 @@ class GoogleAdWordsSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. 
Type: string (or Expression with resultType string). :type query: object @@ -17758,8 +18371,9 @@ class GoogleAdWordsSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -17770,12 +18384,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, query: Optional[object] = None, **kwargs ): - super(GoogleAdWordsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(GoogleAdWordsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'GoogleAdWordsSource' # type: str self.query = query @@ -17791,11 +18406,11 @@ class GoogleBigQueryLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param project: Required. The default BigQuery project to query against. @@ -17810,16 +18425,16 @@ class GoogleBigQueryLinkedService(LinkedService): authentication. ServiceAuthentication can only be used on self-hosted IR. Possible values include: "ServiceAuthentication", "UserAuthentication". :type authentication_type: str or - ~data_factory_management_client.models.GoogleBigQueryAuthenticationType + ~azure.mgmt.datafactory.models.GoogleBigQueryAuthenticationType :param refresh_token: The refresh token obtained from Google for authorizing access to BigQuery for UserAuthentication. - :type refresh_token: ~data_factory_management_client.models.SecretBase + :type refresh_token: ~azure.mgmt.datafactory.models.SecretBase :param client_id: The client id of the google application used to acquire the refresh token. Type: string (or Expression with resultType string). 
:type client_id: object :param client_secret: The client secret of the google application used to acquire the refresh token. - :type client_secret: ~data_factory_management_client.models.SecretBase + :type client_secret: ~azure.mgmt.datafactory.models.SecretBase :param email: The service account email ID that is used for ServiceAuthentication and can only be used on self-hosted IR. :type email: object @@ -17923,14 +18538,14 @@ class GoogleBigQueryObjectDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: This property will be retired. Please consider using database + table properties instead. :type table_name: object @@ -18004,12 +18619,15 @@ class GoogleBigQuerySource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
:type query: object @@ -18025,8 +18643,9 @@ class GoogleBigQuerySource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -18037,12 +18656,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, query: Optional[object] = None, **kwargs ): - super(GoogleBigQuerySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(GoogleBigQuerySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'GoogleBigQuerySource' # type: str self.query = query @@ -18058,11 +18678,11 @@ class GoogleCloudStorageLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param access_key_id: The access key identifier of the Google Cloud Storage Identity and Access @@ -18070,7 +18690,7 @@ class GoogleCloudStorageLinkedService(LinkedService): :type access_key_id: object :param secret_access_key: The secret access key of the Google Cloud Storage Identity and Access Management (IAM) user. - :type secret_access_key: ~data_factory_management_client.models.SecretBase + :type secret_access_key: ~azure.mgmt.datafactory.models.SecretBase :param service_url: This value specifies the endpoint to access with the Google Cloud Storage Connector. This is an optional property; change it only if you want to try a different service endpoint or want to switch between https and http. Type: string (or Expression with resultType @@ -18187,6 +18807,9 @@ class GoogleCloudStorageReadSettings(StoreReadSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. 
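Every TabularSource subclass in this file gets the same mechanical treatment: disableMetricsCollection added, additionalColumns loosened to object, and both forwarded through super().__init__. A minimal sketch using the BigQuery source, assuming the public import path and a hypothetical query:

from azure.mgmt.datafactory.models import GoogleBigQuerySource

bq_source = GoogleBigQuerySource(
    query="SELECT * FROM mydataset.mytable",
    query_timeout="02:00:00",  # matches the documented timeout pattern
    disable_metrics_collection=False,
)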
Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). :type recursive: object @@ -18227,6 +18850,7 @@ class GoogleCloudStorageReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -18244,6 +18868,7 @@ def __init__( *, additional_properties: Optional[Dict[str, object]] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, recursive: Optional[object] = None, wildcard_folder_path: Optional[object] = None, wildcard_file_name: Optional[object] = None, @@ -18256,7 +18881,7 @@ def __init__( modified_datetime_end: Optional[object] = None, **kwargs ): - super(GoogleCloudStorageReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(GoogleCloudStorageReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'GoogleCloudStorageReadSettings' # type: str self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path @@ -18281,18 +18906,18 @@ class GreenplumLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param connection_string: An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. :type connection_string: object :param pwd: The Azure key vault secret reference of password in connection string. - :type pwd: ~data_factory_management_client.models.AzureKeyVaultSecretReference + :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -18354,12 +18979,15 @@ class GreenplumSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. 
Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -18375,8 +19003,9 @@ class GreenplumSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -18387,12 +19016,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, query: Optional[object] = None, **kwargs ): - super(GreenplumSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(GreenplumSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'GreenplumSource' # type: str self.query = query @@ -18416,14 +19046,14 @@ class GreenplumTableDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. 
If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: This property will be retired. Please consider using schema + table properties instead. :type table_name: object @@ -18488,11 +19118,11 @@ class HBaseLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param host: Required. The IP address or host name of the HBase server. (i.e. 192.168.222.160). @@ -18505,12 +19135,11 @@ class HBaseLinkedService(LinkedService): :type http_path: object :param authentication_type: Required. The authentication mechanism to use to connect to the HBase server. Possible values include: "Anonymous", "Basic". - :type authentication_type: str or - ~data_factory_management_client.models.HBaseAuthenticationType + :type authentication_type: str or ~azure.mgmt.datafactory.models.HBaseAuthenticationType :param username: The user name used to connect to the HBase instance. :type username: object :param password: The password corresponding to the user name. - :type password: ~data_factory_management_client.models.SecretBase + :type password: ~azure.mgmt.datafactory.models.SecretBase :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The default value is false. :type enable_ssl: object @@ -18611,14 +19240,14 @@ class HBaseObjectDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The table name. Type: string (or Expression with resultType string). :type table_name: object """ @@ -18679,12 +19308,15 @@ class HBaseSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. 
Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -18700,8 +19332,9 @@ class HBaseSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -18712,12 +19345,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, query: Optional[object] = None, **kwargs ): - super(HBaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(HBaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'HBaseSource' # type: str self.query = query @@ -18733,11 +19367,11 @@ class HdfsLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param url: Required. The URL of the HDFS service endpoint, e.g. @@ -18754,7 +19388,7 @@ class HdfsLinkedService(LinkedService): resultType string). 
:type user_name: object :param password: Password for Windows authentication. - :type password: ~data_factory_management_client.models.SecretBase + :type password: ~azure.mgmt.datafactory.models.SecretBase """ _validation = { @@ -18854,6 +19488,9 @@ class HdfsReadSettings(StoreReadSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). :type recursive: object @@ -18879,7 +19516,7 @@ class HdfsReadSettings(StoreReadSettings): with resultType string). :type modified_datetime_end: object :param distcp_settings: Specifies Distcp-related settings. - :type distcp_settings: ~data_factory_management_client.models.DistcpSettings + :type distcp_settings: ~azure.mgmt.datafactory.models.DistcpSettings :param delete_files_after_completion: Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). :type delete_files_after_completion: object @@ -18893,6 +19530,7 @@ class HdfsReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -18910,6 +19548,7 @@ def __init__( *, additional_properties: Optional[Dict[str, object]] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, recursive: Optional[object] = None, wildcard_folder_path: Optional[object] = None, wildcard_file_name: Optional[object] = None, @@ -18922,7 +19561,7 @@ def __init__( delete_files_after_completion: Optional[object] = None, **kwargs ): - super(HdfsReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(HdfsReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'HdfsReadSettings' # type: str self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path @@ -18955,11 +19594,14 @@ class HdfsSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). 
:type recursive: object :param distcp_settings: Specifies Distcp-related settings. - :type distcp_settings: ~data_factory_management_client.models.DistcpSettings + :type distcp_settings: ~azure.mgmt.datafactory.models.DistcpSettings """ _validation = { @@ -18972,6 +19614,7 @@ class HdfsSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'distcp_settings': {'key': 'distcpSettings', 'type': 'DistcpSettings'}, } @@ -18983,11 +19626,12 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, recursive: Optional[object] = None, distcp_settings: Optional["DistcpSettings"] = None, **kwargs ): - super(HdfsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(HdfsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'HdfsSource' # type: str self.recursive = recursive self.distcp_settings = distcp_settings @@ -19008,25 +19652,23 @@ class HdInsightHiveActivity(ExecutionActivity): :param description: Activity description. :type description: str :param depends_on: Activity depends on condition. - :type depends_on: list[~data_factory_management_client.models.ActivityDependency] + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. - :type user_properties: list[~data_factory_management_client.models.UserProperty] + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] :param linked_service_name: Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param policy: Activity policy. - :type policy: ~data_factory_management_client.models.ActivityPolicy + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy :param storage_linked_services: Storage linked service references. - :type storage_linked_services: - list[~data_factory_management_client.models.LinkedServiceReference] + :type storage_linked_services: list[~azure.mgmt.datafactory.models.LinkedServiceReference] :param arguments: User specified arguments to HDInsightActivity. :type arguments: list[object] :param get_debug_info: Debug info option. Possible values include: "None", "Always", "Failure". - :type get_debug_info: str or - ~data_factory_management_client.models.HdInsightActivityDebugInfoOption + :type get_debug_info: str or ~azure.mgmt.datafactory.models.HdInsightActivityDebugInfoOption :param script_path: Script path. Type: string (or Expression with resultType string). :type script_path: object :param script_linked_service: Script linked service reference. 
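HdfsSource keeps its DistCp path while gaining the same metrics flag. A minimal sketch with hypothetical cluster endpoints; the DistcpSettings constructor (two required arguments) is assumed from the public SDK, since its definition falls outside this hunk:

from azure.mgmt.datafactory.models import DistcpSettings, HdfsSource

hdfs_source = HdfsSource(
    recursive=True,
    disable_metrics_collection=True,
    distcp_settings=DistcpSettings(
        resource_manager_endpoint="myrm.example.com:8088",
        temp_script_path="/tmp/distcp",
    ),
)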
- :type script_linked_service: ~data_factory_management_client.models.LinkedServiceReference + :type script_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference :param defines: Allows user to specify defines for Hive job request. :type defines: dict[str, object] :param variables: User specified arguments under hivevar namespace. @@ -19103,11 +19745,11 @@ class HdInsightLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param cluster_uri: Required. HDInsight cluster URI. Type: string (or Expression with @@ -19117,13 +19759,12 @@ class HdInsightLinkedService(LinkedService): string). :type user_name: object :param password: HDInsight cluster password. - :type password: ~data_factory_management_client.models.SecretBase + :type password: ~azure.mgmt.datafactory.models.SecretBase :param linked_service_name: The Azure Storage linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param hcatalog_linked_service_name: A reference to the Azure SQL linked service that points to the HCatalog database. - :type hcatalog_linked_service_name: - ~data_factory_management_client.models.LinkedServiceReference + :type hcatalog_linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -19203,27 +19844,25 @@ class HdInsightMapReduceActivity(ExecutionActivity): :param description: Activity description. :type description: str :param depends_on: Activity depends on condition. - :type depends_on: list[~data_factory_management_client.models.ActivityDependency] + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. - :type user_properties: list[~data_factory_management_client.models.UserProperty] + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] :param linked_service_name: Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param policy: Activity policy. - :type policy: ~data_factory_management_client.models.ActivityPolicy + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy :param storage_linked_services: Storage linked service references. 
- :type storage_linked_services: - list[~data_factory_management_client.models.LinkedServiceReference] + :type storage_linked_services: list[~azure.mgmt.datafactory.models.LinkedServiceReference] :param arguments: User specified arguments to HDInsightActivity. :type arguments: list[object] :param get_debug_info: Debug info option. Possible values include: "None", "Always", "Failure". - :type get_debug_info: str or - ~data_factory_management_client.models.HdInsightActivityDebugInfoOption + :type get_debug_info: str or ~azure.mgmt.datafactory.models.HdInsightActivityDebugInfoOption :param class_name: Required. Class name. Type: string (or Expression with resultType string). :type class_name: object :param jar_file_path: Required. Jar path. Type: string (or Expression with resultType string). :type jar_file_path: object :param jar_linked_service: Jar linked service reference. - :type jar_linked_service: ~data_factory_management_client.models.LinkedServiceReference + :type jar_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference :param jar_libs: Jar libs. :type jar_libs: list[object] :param defines: Allows user to specify defines for the MapReduce job request. @@ -19299,11 +19938,11 @@ class HdInsightOnDemandLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param cluster_size: Required. Number of worker/data nodes in the cluster. Suggestion value: 4. @@ -19319,7 +19958,7 @@ class HdInsightOnDemandLinkedService(LinkedService): :type version: object :param linked_service_name: Required. Azure Storage linked service to be used by the on-demand cluster for storing and processing data. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param host_subscription_id: Required. The customer’s subscription to host the cluster. Type: string (or Expression with resultType string). :type host_subscription_id: object @@ -19327,7 +19966,7 @@ class HdInsightOnDemandLinkedService(LinkedService): (or Expression with resultType string). :type service_principal_id: object :param service_principal_key: The key for the service principal id. - :type service_principal_key: ~data_factory_management_client.models.SecretBase + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :param tenant: Required. The Tenant id/name to which the service principal belongs. Type: string (or Expression with resultType string). :type tenant: object @@ -19341,21 +19980,20 @@ class HdInsightOnDemandLinkedService(LinkedService): resultType string). :type cluster_user_name: object :param cluster_password: The password to access the cluster. 
- :type cluster_password: ~data_factory_management_client.models.SecretBase + :type cluster_password: ~azure.mgmt.datafactory.models.SecretBase :param cluster_ssh_user_name: The username to SSH remotely connect to cluster’s node (for Linux). Type: string (or Expression with resultType string). :type cluster_ssh_user_name: object :param cluster_ssh_password: The password to SSH remotely connect cluster’s node (for Linux). - :type cluster_ssh_password: ~data_factory_management_client.models.SecretBase + :type cluster_ssh_password: ~azure.mgmt.datafactory.models.SecretBase :param additional_linked_service_names: Specifies additional storage accounts for the HDInsight linked service so that the Data Factory service can register them on your behalf. :type additional_linked_service_names: - list[~data_factory_management_client.models.LinkedServiceReference] + list[~azure.mgmt.datafactory.models.LinkedServiceReference] :param hcatalog_linked_service_name: The name of Azure SQL linked service that point to the HCatalog database. The on-demand HDInsight cluster is created by using the Azure SQL database as the metastore. - :type hcatalog_linked_service_name: - ~data_factory_management_client.models.LinkedServiceReference + :type hcatalog_linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param cluster_type: The cluster type. Type: string (or Expression with resultType string). :type cluster_type: object :param spark_version: The version of spark if the cluster type is 'spark'. Type: string (or @@ -19400,13 +20038,15 @@ class HdInsightOnDemandLinkedService(LinkedService): Please refer to https://docs.microsoft.com/en-us/azure/hdinsight/hdinsight-hadoop-customize- cluster-linux?toc=%2Fen-us%2Fazure%2Fhdinsight%2Fr-server%2FTOC.json&bc=%2Fen- us%2Fazure%2Fbread%2Ftoc.json#understanding-script-actions. - :type script_actions: list[~data_factory_management_client.models.ScriptAction] + :type script_actions: list[~azure.mgmt.datafactory.models.ScriptAction] :param virtual_network_id: The ARM resource ID for the vNet to which the cluster should be joined after creation. Type: string (or Expression with resultType string). :type virtual_network_id: object :param subnet_name: The ARM resource ID for the subnet in the vNet. If virtualNetworkId was specified, then this property is required. Type: string (or Expression with resultType string). :type subnet_name: object + :param credential: The credential reference containing authentication information. 
+ :type credential: ~azure.mgmt.datafactory.models.CredentialReference """ _validation = { @@ -19460,6 +20100,7 @@ class HdInsightOnDemandLinkedService(LinkedService): 'script_actions': {'key': 'typeProperties.scriptActions', 'type': '[ScriptAction]'}, 'virtual_network_id': {'key': 'typeProperties.virtualNetworkId', 'type': 'object'}, 'subnet_name': {'key': 'typeProperties.subnetName', 'type': 'object'}, + 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'}, } def __init__( @@ -19503,6 +20144,7 @@ def __init__( script_actions: Optional[List["ScriptAction"]] = None, virtual_network_id: Optional[object] = None, subnet_name: Optional[object] = None, + credential: Optional["CredentialReference"] = None, **kwargs ): super(HdInsightOnDemandLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -19540,6 +20182,7 @@ def __init__( self.script_actions = script_actions self.virtual_network_id = virtual_network_id self.subnet_name = subnet_name + self.credential = credential class HdInsightPigActivity(ExecutionActivity): @@ -19557,26 +20200,24 @@ class HdInsightPigActivity(ExecutionActivity): :param description: Activity description. :type description: str :param depends_on: Activity depends on condition. - :type depends_on: list[~data_factory_management_client.models.ActivityDependency] + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. - :type user_properties: list[~data_factory_management_client.models.UserProperty] + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] :param linked_service_name: Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param policy: Activity policy. - :type policy: ~data_factory_management_client.models.ActivityPolicy + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy :param storage_linked_services: Storage linked service references. - :type storage_linked_services: - list[~data_factory_management_client.models.LinkedServiceReference] + :type storage_linked_services: list[~azure.mgmt.datafactory.models.LinkedServiceReference] :param arguments: User specified arguments to HDInsightActivity. Type: array (or Expression with resultType array). :type arguments: object :param get_debug_info: Debug info option. Possible values include: "None", "Always", "Failure". - :type get_debug_info: str or - ~data_factory_management_client.models.HdInsightActivityDebugInfoOption + :type get_debug_info: str or ~azure.mgmt.datafactory.models.HdInsightActivityDebugInfoOption :param script_path: Script path. Type: string (or Expression with resultType string). :type script_path: object :param script_linked_service: Script linked service reference. - :type script_linked_service: ~data_factory_management_client.models.LinkedServiceReference + :type script_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference :param defines: Allows user to specify defines for Pig job request. :type defines: dict[str, object] """ @@ -19646,13 +20287,13 @@ class HdInsightSparkActivity(ExecutionActivity): :param description: Activity description. :type description: str :param depends_on: Activity depends on condition. 
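The new credential type property on HdInsightOnDemandLinkedService follows the CredentialReference pattern used elsewhere in this diff: it is optional, serialized under typeProperties.credential, and only carries the name of a credential already defined in the factory. A hedged sketch; every name and ID below is a placeholder, and 'myCredential' is a hypothetical factory credential (for example a user-assigned managed identity):

from azure.mgmt.datafactory.models import (
    CredentialReference,
    HdInsightOnDemandLinkedService,
    LinkedServiceReference,
)

ls = HdInsightOnDemandLinkedService(
    cluster_size=4,
    time_to_live="00:15:00",
    version="3.6",
    linked_service_name=LinkedServiceReference(
        type="LinkedServiceReference", reference_name="AzureBlobStorageLS"
    ),
    host_subscription_id="<subscription-id>",
    tenant="<tenant-id>",
    cluster_resource_group="<resource-group>",
    # The reference is resolved by the service at runtime; only the
    # credential's name travels on the wire.
    credential=CredentialReference(reference_name="myCredential"),
)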
- :type depends_on: list[~data_factory_management_client.models.ActivityDependency] + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. - :type user_properties: list[~data_factory_management_client.models.UserProperty] + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] :param linked_service_name: Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param policy: Activity policy. - :type policy: ~data_factory_management_client.models.ActivityPolicy + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy :param root_path: Required. The root path in 'sparkJobLinkedService' for all the job’s files. Type: string (or Expression with resultType string). :type root_path: object @@ -19662,11 +20303,10 @@ class HdInsightSparkActivity(ExecutionActivity): :param arguments: The user-specified arguments to HDInsightSparkActivity. :type arguments: list[object] :param get_debug_info: Debug info option. Possible values include: "None", "Always", "Failure". - :type get_debug_info: str or - ~data_factory_management_client.models.HdInsightActivityDebugInfoOption + :type get_debug_info: str or ~azure.mgmt.datafactory.models.HdInsightActivityDebugInfoOption :param spark_job_linked_service: The storage linked service for uploading the entry file and dependencies, and for receiving logs. - :type spark_job_linked_service: ~data_factory_management_client.models.LinkedServiceReference + :type spark_job_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference :param class_name: The application's Java/Spark main class. :type class_name: str :param proxy_user: The user to impersonate that will execute the job. Type: string (or @@ -19749,21 +20389,19 @@ class HdInsightStreamingActivity(ExecutionActivity): :param description: Activity description. :type description: str :param depends_on: Activity depends on condition. - :type depends_on: list[~data_factory_management_client.models.ActivityDependency] + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. - :type user_properties: list[~data_factory_management_client.models.UserProperty] + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] :param linked_service_name: Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param policy: Activity policy. - :type policy: ~data_factory_management_client.models.ActivityPolicy + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy :param storage_linked_services: Storage linked service references. - :type storage_linked_services: - list[~data_factory_management_client.models.LinkedServiceReference] + :type storage_linked_services: list[~azure.mgmt.datafactory.models.LinkedServiceReference] :param arguments: User specified arguments to HDInsightActivity. :type arguments: list[object] :param get_debug_info: Debug info option. Possible values include: "None", "Always", "Failure". - :type get_debug_info: str or - ~data_factory_management_client.models.HdInsightActivityDebugInfoOption + :type get_debug_info: str or ~azure.mgmt.datafactory.models.HdInsightActivityDebugInfoOption :param mapper: Required. Mapper executable name. 
Type: string (or Expression with resultType string). :type mapper: object @@ -19777,7 +20415,7 @@ class HdInsightStreamingActivity(ExecutionActivity): :param file_paths: Required. Paths to streaming job files. Can be directories. :type file_paths: list[object] :param file_linked_service: Linked service reference where the files are located. - :type file_linked_service: ~data_factory_management_client.models.LinkedServiceReference + :type file_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference :param combiner: Combiner executable name. Type: string (or Expression with resultType string). :type combiner: object :param command_environment: Command line environment values. @@ -19870,11 +20508,11 @@ class HiveLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param host: Required. IP address or host name of the Hive server, separated by ';' for @@ -19884,15 +20522,15 @@ class HiveLinkedService(LinkedService): :type port: object :param server_type: The type of Hive server. Possible values include: "HiveServer1", "HiveServer2", "HiveThriftServer". - :type server_type: str or ~data_factory_management_client.models.HiveServerType + :type server_type: str or ~azure.mgmt.datafactory.models.HiveServerType :param thrift_transport_protocol: The transport protocol to use in the Thrift layer. Possible values include: "Binary", "SASL", "HTTP ". :type thrift_transport_protocol: str or - ~data_factory_management_client.models.HiveThriftTransportProtocol + ~azure.mgmt.datafactory.models.HiveThriftTransportProtocol :param authentication_type: Required. The authentication method used to access the Hive server. Possible values include: "Anonymous", "Username", "UsernameAndPassword", "WindowsAzureHDInsightService". - :type authentication_type: str or ~data_factory_management_client.models.HiveAuthenticationType + :type authentication_type: str or ~azure.mgmt.datafactory.models.HiveAuthenticationType :param service_discovery_mode: true to indicate using the ZooKeeper service, false not. :type service_discovery_mode: object :param zoo_keeper_name_space: The namespace on ZooKeeper under which Hive Server 2 nodes are @@ -19905,7 +20543,7 @@ class HiveLinkedService(LinkedService): :type username: object :param password: The password corresponding to the user name that you provided in the Username field. - :type password: ~data_factory_management_client.models.SecretBase + :type password: ~azure.mgmt.datafactory.models.SecretBase :param http_path: The partial URL corresponding to the Hive server. :type http_path: object :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The @@ -20029,14 +20667,14 @@ class HiveObjectDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. 
Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: This property will be retired. Please consider using schema + table properties instead. :type table_name: object @@ -20109,12 +20747,15 @@ class HiveSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
:type query: object @@ -20130,8 +20771,9 @@ class HiveSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -20142,12 +20784,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, query: Optional[object] = None, **kwargs ): - super(HiveSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(HiveSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'HiveSource' # type: str self.query = query @@ -20171,14 +20814,14 @@ class HttpDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param relative_url: The relative URL based on the URL in the HttpLinkedService refers to an HTTP file Type: string (or Expression with resultType string). :type relative_url: object @@ -20195,9 +20838,9 @@ class HttpDataset(Dataset): string). :type additional_headers: object :param format: The format of files. - :type format: ~data_factory_management_client.models.DatasetStorageFormat + :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat :param compression: The data compression method used on files. - :type compression: ~data_factory_management_client.models.DatasetCompression + :type compression: ~azure.mgmt.datafactory.models.DatasetCompression """ _validation = { @@ -20263,11 +20906,11 @@ class HttpLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. 
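Note that additional_columns is loosened here from list[AdditionalColumns] to a bare object, both in the docstring and in _attribute_map ('[AdditionalColumns]' becomes 'object'), so callers can pass either a literal array or an ADF expression that resolves to one at run time. A sketch of both shapes, assuming HiveSource from this diff:

from azure.mgmt.datafactory.models import HiveSource

# Literal form: plain dicts in the old AdditionalColumns shape
# ($$FILEPATH is the reserved variable for the source file path).
literal = HiveSource(
    query="SELECT * FROM logs",
    additional_columns=[{"name": "source_file", "value": "$$FILEPATH"}],
)

# Expression form: resolved to an array of objects at run time.
dynamic = HiveSource(
    additional_columns={
        "type": "Expression",
        "value": "@pipeline().parameters.extraColumns",
    },
)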
:type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param url: Required. The base URL of the HTTP endpoint, e.g. http://www.microsoft.com. Type: @@ -20275,13 +20918,13 @@ class HttpLinkedService(LinkedService): :type url: object :param authentication_type: The authentication type to be used to connect to the HTTP server. Possible values include: "Basic", "Anonymous", "Digest", "Windows", "ClientCertificate". - :type authentication_type: str or ~data_factory_management_client.models.HttpAuthenticationType + :type authentication_type: str or ~azure.mgmt.datafactory.models.HttpAuthenticationType :param user_name: User name for Basic, Digest, or Windows authentication. Type: string (or Expression with resultType string). :type user_name: object :param password: Password for Basic, Digest, Windows, or ClientCertificate with EmbeddedCertData authentication. - :type password: ~data_factory_management_client.models.SecretBase + :type password: ~azure.mgmt.datafactory.models.SecretBase :param auth_headers: The additional HTTP headers in the request to RESTful API used for authorization. Type: object (or Expression with resultType object). :type auth_headers: object @@ -20372,6 +21015,9 @@ class HttpReadSettings(StoreReadSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param request_method: The HTTP method used to call the RESTful API. The default is GET. Type: string (or Expression with resultType string). 
:type request_method: object @@ -20399,6 +21045,7 @@ class HttpReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'request_method': {'key': 'requestMethod', 'type': 'object'}, 'request_body': {'key': 'requestBody', 'type': 'object'}, 'additional_headers': {'key': 'additionalHeaders', 'type': 'object'}, @@ -20412,6 +21059,7 @@ def __init__( *, additional_properties: Optional[Dict[str, object]] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, request_method: Optional[object] = None, request_body: Optional[object] = None, additional_headers: Optional[object] = None, @@ -20420,7 +21068,7 @@ def __init__( partition_root_path: Optional[object] = None, **kwargs ): - super(HttpReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(HttpReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'HttpReadSettings' # type: str self.request_method = request_method self.request_body = request_body @@ -20496,6 +21144,9 @@ class HttpSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param http_request_timeout: Specifies the timeout for a HTTP client to get HTTP response from HTTP server. The default value is equivalent to System.Net.HttpWebRequest.Timeout. 
Type: string (or Expression with resultType string), pattern: @@ -20513,6 +21164,7 @@ class HttpSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, } @@ -20523,10 +21175,11 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, http_request_timeout: Optional[object] = None, **kwargs ): - super(HttpSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(HttpSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'HttpSource' # type: str self.http_request_timeout = http_request_timeout @@ -20542,23 +21195,23 @@ class HubspotLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param client_id: Required. The client ID associated with your Hubspot application. :type client_id: object :param client_secret: The client secret associated with your Hubspot application. - :type client_secret: ~data_factory_management_client.models.SecretBase + :type client_secret: ~azure.mgmt.datafactory.models.SecretBase :param access_token: The access token obtained when initially authenticating your OAuth integration. - :type access_token: ~data_factory_management_client.models.SecretBase + :type access_token: ~azure.mgmt.datafactory.models.SecretBase :param refresh_token: The refresh token obtained when initially authenticating your OAuth integration. - :type refresh_token: ~data_factory_management_client.models.SecretBase + :type refresh_token: ~azure.mgmt.datafactory.models.SecretBase :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. :type use_encrypted_endpoints: object @@ -20646,14 +21299,14 @@ class HubspotObjectDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. 
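As the _attribute_map entries above show, each snake_case Python field is bound to a camelCase wire name, and msrest walks that map when serializing and deserializing. A quick illustration with HttpReadSettings, assuming the models from this diff:

from azure.mgmt.datafactory.models import HttpReadSettings

settings = HttpReadSettings(
    request_method="GET",
    disable_metrics_collection=True,
)

# msrest.serialization.Model.serialize() uses _attribute_map, so the
# payload carries the wire names, roughly:
# {'type': 'HttpReadSettings', 'requestMethod': 'GET',
#  'disableMetricsCollection': True}
print(settings.serialize())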
- :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The table name. Type: string (or Expression with resultType string). :type table_name: object """ @@ -20714,12 +21367,15 @@ class HubspotSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
:type query: object @@ -20735,8 +21391,9 @@ class HubspotSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -20747,12 +21404,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, query: Optional[object] = None, **kwargs ): - super(HubspotSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(HubspotSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'HubspotSource' # type: str self.query = query @@ -20772,19 +21430,19 @@ class IfConditionActivity(Activity): :param description: Activity description. :type description: str :param depends_on: Activity depends on condition. - :type depends_on: list[~data_factory_management_client.models.ActivityDependency] + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. - :type user_properties: list[~data_factory_management_client.models.UserProperty] + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] :param expression: Required. An expression that would evaluate to Boolean. This is used to determine the block of activities (ifTrueActivities or ifFalseActivities) that will be executed. - :type expression: ~data_factory_management_client.models.Expression + :type expression: ~azure.mgmt.datafactory.models.Expression :param if_true_activities: List of activities to execute if expression is evaluated to true. This is an optional property and if not provided, the activity will exit without any action. - :type if_true_activities: list[~data_factory_management_client.models.Activity] + :type if_true_activities: list[~azure.mgmt.datafactory.models.Activity] :param if_false_activities: List of activities to execute if expression is evaluated to false. This is an optional property and if not provided, the activity will exit without any action. - :type if_false_activities: list[~data_factory_management_client.models.Activity] + :type if_false_activities: list[~azure.mgmt.datafactory.models.Activity] """ _validation = { @@ -20836,11 +21494,11 @@ class ImpalaLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. 
:type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param host: Required. The IP address or host name of the Impala server. (i.e. @@ -20851,13 +21509,12 @@ class ImpalaLinkedService(LinkedService): :type port: object :param authentication_type: Required. The authentication type to use. Possible values include: "Anonymous", "SASLUsername", "UsernameAndPassword". - :type authentication_type: str or - ~data_factory_management_client.models.ImpalaAuthenticationType + :type authentication_type: str or ~azure.mgmt.datafactory.models.ImpalaAuthenticationType :param username: The user name used to access the Impala server. The default value is anonymous when using SASLUsername. :type username: object :param password: The password corresponding to the user name when using UsernameAndPassword. - :type password: ~data_factory_management_client.models.SecretBase + :type password: ~azure.mgmt.datafactory.models.SecretBase :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The default value is false. :type enable_ssl: object @@ -20961,14 +21618,14 @@ class ImpalaObjectDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: This property will be retired. Please consider using schema + table properties instead. :type table_name: object @@ -21042,12 +21699,15 @@ class ImpalaSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. 
Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -21063,8 +21723,9 @@ class ImpalaSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -21075,12 +21736,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, query: Optional[object] = None, **kwargs ): - super(ImpalaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(ImpalaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'ImpalaSource' # type: str self.query = query @@ -21096,11 +21758,11 @@ class InformixLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param connection_string: Required. The non-access credential portion of the connection string @@ -21113,12 +21775,12 @@ class InformixLinkedService(LinkedService): :type authentication_type: object :param credential: The access credential portion of the connection string specified in driver- specific property-value format. - :type credential: ~data_factory_management_client.models.SecretBase + :type credential: ~azure.mgmt.datafactory.models.SecretBase :param user_name: User name for Basic authentication. Type: string (or Expression with resultType string). 
:type user_name: object :param password: Password for Basic authentication. - :type password: ~data_factory_management_client.models.SecretBase + :type password: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -21196,6 +21858,9 @@ class InformixSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param pre_copy_script: A query to execute before starting the copy. Type: string (or Expression with resultType string). :type pre_copy_script: object @@ -21213,6 +21878,7 @@ class InformixSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } @@ -21225,10 +21891,11 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, pre_copy_script: Optional[object] = None, **kwargs ): - super(InformixSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(InformixSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'InformixSink' # type: str self.pre_copy_script = pre_copy_script @@ -21252,12 +21919,15 @@ class InformixSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: Database query. Type: string (or Expression with resultType string). 
:type query: object """ @@ -21272,8 +21942,9 @@ class InformixSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -21284,12 +21955,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, query: Optional[object] = None, **kwargs ): - super(InformixSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(InformixSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'InformixSource' # type: str self.query = query @@ -21313,14 +21985,14 @@ class InformixTableDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The Informix table name. Type: string (or Expression with resultType string). :type table_name: object @@ -21376,7 +22048,7 @@ class IntegrationRuntime(msrest.serialization.Model): :type additional_properties: dict[str, object] :param type: Required. Type of integration runtime.Constant filled by server. Possible values include: "Managed", "SelfHosted". - :type type: str or ~data_factory_management_client.models.IntegrationRuntimeType + :type type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeType :param description: Integration runtime description. :type description: str """ @@ -21452,10 +22124,9 @@ class IntegrationRuntimeComputeProperties(msrest.serialization.Model): integration runtime. 
:type max_parallel_executions_per_node: int :param data_flow_properties: Data flow properties for managed integration runtime. - :type data_flow_properties: - ~data_factory_management_client.models.IntegrationRuntimeDataFlowProperties + :type data_flow_properties: ~azure.mgmt.datafactory.models.IntegrationRuntimeDataFlowProperties :param v_net_properties: VNet properties for managed integration runtime. - :type v_net_properties: ~data_factory_management_client.models.IntegrationRuntimeVNetProperties + :type v_net_properties: ~azure.mgmt.datafactory.models.IntegrationRuntimeVNetProperties """ _validation = { @@ -21562,7 +22233,7 @@ class IntegrationRuntimeCustomSetupScriptProperties(msrest.serialization.Model): script. :type blob_container_uri: str :param sas_token: The SAS token of the Azure blob container. - :type sas_token: ~data_factory_management_client.models.SecureString + :type sas_token: ~azure.mgmt.datafactory.models.SecureString """ _attribute_map = { @@ -21590,13 +22261,16 @@ class IntegrationRuntimeDataFlowProperties(msrest.serialization.Model): :type additional_properties: dict[str, object] :param compute_type: Compute type of the cluster which will execute data flow job. Possible values include: "General", "MemoryOptimized", "ComputeOptimized". - :type compute_type: str or ~data_factory_management_client.models.DataFlowComputeType + :type compute_type: str or ~azure.mgmt.datafactory.models.DataFlowComputeType :param core_count: Core count of the cluster which will execute data flow job. Supported values are: 8, 16, 32, 48, 80, 144 and 272. :type core_count: int :param time_to_live: Time to live (in minutes) setting of the cluster which will execute data flow job. :type time_to_live: int + :param cleanup: If set to false, the cluster is not recycled after each data flow activity run; + it is reused by subsequent runs until the TTL (time to live) is reached. Default is true. + :type cleanup: bool """ _validation = { @@ -21608,6 +22282,7 @@ class IntegrationRuntimeDataFlowProperties(msrest.serialization.Model): 'compute_type': {'key': 'computeType', 'type': 'str'}, 'core_count': {'key': 'coreCount', 'type': 'int'}, 'time_to_live': {'key': 'timeToLive', 'type': 'int'}, + 'cleanup': {'key': 'cleanup', 'type': 'bool'}, } def __init__( self, *, compute_type: Optional[Union[str, "DataFlowComputeType"]] = None, core_count: Optional[int] = None, time_to_live: Optional[int] = None, + cleanup: Optional[bool] = None, **kwargs ): super(IntegrationRuntimeDataFlowProperties, self).__init__(**kwargs) @@ -21617,6 +22292,7 @@ def __init__( self.compute_type = compute_type self.core_count = core_count self.time_to_live = time_to_live + self.cleanup = cleanup class IntegrationRuntimeDataProxyProperties(msrest.serialization.Model): """Data proxy properties for a managed dedicated integration runtime. :param connect_via: The self-hosted integration runtime reference. - :type connect_via: ~data_factory_management_client.models.EntityReference + :type connect_via: ~azure.mgmt.datafactory.models.EntityReference :param staging_linked_service: The staging linked service reference. - :type staging_linked_service: ~data_factory_management_client.models.EntityReference + :type staging_linked_service: ~azure.mgmt.datafactory.models.EntityReference :param path: The path to contain the staged data in the Blob storage. :type path: str """ _attribute_map = { @@ -21665,7 +22342,7 @@ class IntegrationRuntimeDebugResource(SubResourceDebugResource): :param name: The resource name.
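The new cleanup flag complements time_to_live: with cleanup=False the data flow cluster is kept and reused by subsequent activity runs until the TTL expires, instead of being recycled after each run. A minimal sketch, assuming the models from this diff:

from azure.mgmt.datafactory.models import (
    IntegrationRuntimeComputeProperties,
    IntegrationRuntimeDataFlowProperties,
)

compute = IntegrationRuntimeComputeProperties(
    data_flow_properties=IntegrationRuntimeDataFlowProperties(
        compute_type="General",
        core_count=8,
        time_to_live=10,  # minutes
        cleanup=False,    # keep the cluster for reuse until the TTL elapses
    ),
)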
:type name: str :param properties: Required. Integration runtime properties. - :type properties: ~data_factory_management_client.models.IntegrationRuntime + :type properties: ~azure.mgmt.datafactory.models.IntegrationRuntime """ _validation = { @@ -21694,7 +22371,7 @@ class IntegrationRuntimeListResponse(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. :param value: Required. List of integration runtimes. - :type value: list[~data_factory_management_client.models.IntegrationRuntimeResource] + :type value: list[~azure.mgmt.datafactory.models.IntegrationRuntimeResource] :param next_link: The link to the next page of results, if any remaining results exist. :type next_link: str """ @@ -21726,7 +22403,7 @@ class IntegrationRuntimeMonitoringData(msrest.serialization.Model): :param name: Integration runtime name. :type name: str :param nodes: Integration runtime node monitoring data. - :type nodes: list[~data_factory_management_client.models.IntegrationRuntimeNodeMonitoringData] + :type nodes: list[~azure.mgmt.datafactory.models.IntegrationRuntimeNodeMonitoringData] """ _attribute_map = { @@ -21839,6 +22516,103 @@ def __init__( self.received_bytes = None +class IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint(msrest.serialization.Model): + """Azure-SSIS integration runtime outbound network dependency endpoints for one category. + + :param category: The category of outbound network dependency. + :type category: str + :param endpoints: The endpoints for outbound network dependency. + :type endpoints: + list[~azure.mgmt.datafactory.models.IntegrationRuntimeOutboundNetworkDependenciesEndpoint] + """ + + _attribute_map = { + 'category': {'key': 'category', 'type': 'str'}, + 'endpoints': {'key': 'endpoints', 'type': '[IntegrationRuntimeOutboundNetworkDependenciesEndpoint]'}, + } + + def __init__( + self, + *, + category: Optional[str] = None, + endpoints: Optional[List["IntegrationRuntimeOutboundNetworkDependenciesEndpoint"]] = None, + **kwargs + ): + super(IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint, self).__init__(**kwargs) + self.category = category + self.endpoints = endpoints + + +class IntegrationRuntimeOutboundNetworkDependenciesEndpoint(msrest.serialization.Model): + """The endpoint for Azure-SSIS integration runtime outbound network dependency. + + :param domain_name: The domain name of endpoint. + :type domain_name: str + :param endpoint_details: The details of endpoint. + :type endpoint_details: + list[~azure.mgmt.datafactory.models.IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails] + """ + + _attribute_map = { + 'domain_name': {'key': 'domainName', 'type': 'str'}, + 'endpoint_details': {'key': 'endpointDetails', 'type': '[IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails]'}, + } + + def __init__( + self, + *, + domain_name: Optional[str] = None, + endpoint_details: Optional[List["IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails"]] = None, + **kwargs + ): + super(IntegrationRuntimeOutboundNetworkDependenciesEndpoint, self).__init__(**kwargs) + self.domain_name = domain_name + self.endpoint_details = endpoint_details + + +class IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails(msrest.serialization.Model): + """The details of Azure-SSIS integration runtime outbound network dependency endpoint. + + :param port: The port of endpoint. 
+ :type port: int + """ + + _attribute_map = { + 'port': {'key': 'port', 'type': 'int'}, + } + + def __init__( + self, + *, + port: Optional[int] = None, + **kwargs + ): + super(IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails, self).__init__(**kwargs) + self.port = port + + +class IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse(msrest.serialization.Model): + """Azure-SSIS integration runtime outbound network dependency endpoints. + + :param value: The list of outbound network dependency endpoints. + :type value: + list[~azure.mgmt.datafactory.models.IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint] + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint]'}, + } + + def __init__( + self, + *, + value: Optional[List["IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint"]] = None, + **kwargs + ): + super(IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse, self).__init__(**kwargs) + self.value = value + + class IntegrationRuntimeReference(msrest.serialization.Model): """Integration runtime reference type. @@ -21885,7 +22659,7 @@ class IntegrationRuntimeRegenerateKeyParameters(msrest.serialization.Model): :param key_name: The name of the authentication key to regenerate. Possible values include: "authKey1", "authKey2". - :type key_name: str or ~data_factory_management_client.models.IntegrationRuntimeAuthKeyName + :type key_name: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeAuthKeyName """ _attribute_map = { @@ -21918,7 +22692,7 @@ class IntegrationRuntimeResource(SubResource): :ivar etag: Etag identifies change in the resource. :vartype etag: str :param properties: Required. Integration runtime properties. - :type properties: ~data_factory_management_client.models.IntegrationRuntime + :type properties: ~azure.mgmt.datafactory.models.IntegrationRuntime """ _validation = { @@ -21959,12 +22733,12 @@ class IntegrationRuntimeSsisCatalogInfo(msrest.serialization.Model): :type catalog_admin_user_name: str :param catalog_admin_password: The password of the administrator user account of the catalog database. - :type catalog_admin_password: ~data_factory_management_client.models.SecureString + :type catalog_admin_password: ~azure.mgmt.datafactory.models.SecureString :param catalog_pricing_tier: The pricing tier for the catalog database. The valid values could be found in https://azure.microsoft.com/en-us/pricing/details/sql-database/. Possible values include: "Basic", "Standard", "Premium", "PremiumRS". :type catalog_pricing_tier: str or - ~data_factory_management_client.models.IntegrationRuntimeSsisCatalogPricingTier + ~azure.mgmt.datafactory.models.IntegrationRuntimeSsisCatalogPricingTier :param dual_standby_pair_name: The dual standby pair name of Azure-SSIS Integration Runtimes to support SSISDB failover. :type dual_standby_pair_name: str @@ -22010,27 +22784,28 @@ class IntegrationRuntimeSsisProperties(msrest.serialization.Model): collection. :type additional_properties: dict[str, object] :param catalog_info: Catalog information for managed dedicated integration runtime. - :type catalog_info: ~data_factory_management_client.models.IntegrationRuntimeSsisCatalogInfo + :type catalog_info: ~azure.mgmt.datafactory.models.IntegrationRuntimeSsisCatalogInfo :param license_type: License type for bringing your own license scenario. Possible values include: "BasePrice", "LicenseIncluded". 
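The four IntegrationRuntimeOutboundNetworkDependencies* models added above exist mainly to deserialize one nested response: categories, each holding endpoints, each holding port details. A sketch of flattening that structure; 'response' is assumed to be an already-deserialized IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse, for example the result of the corresponding Azure-SSIS integration runtime operation:

from typing import List

def flatten_outbound_endpoints(response) -> List[str]:
    # Every level is optional on the wire, hence the 'or []' guards.
    lines = []
    for category in response.value or []:
        for endpoint in category.endpoints or []:
            ports = [d.port for d in endpoint.endpoint_details or []]
            lines.append(f"{category.category}: {endpoint.domain_name} -> {ports}")
    return lines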
- :type license_type: str or ~data_factory_management_client.models.IntegrationRuntimeLicenseType + :type license_type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeLicenseType :param custom_setup_script_properties: Custom setup script properties for a managed dedicated integration runtime. :type custom_setup_script_properties: - ~data_factory_management_client.models.IntegrationRuntimeCustomSetupScriptProperties + ~azure.mgmt.datafactory.models.IntegrationRuntimeCustomSetupScriptProperties :param data_proxy_properties: Data proxy properties for a managed dedicated integration runtime. :type data_proxy_properties: - ~data_factory_management_client.models.IntegrationRuntimeDataProxyProperties + ~azure.mgmt.datafactory.models.IntegrationRuntimeDataProxyProperties :param edition: The edition for the SSIS Integration Runtime. Possible values include: "Standard", "Enterprise". - :type edition: str or ~data_factory_management_client.models.IntegrationRuntimeEdition + :type edition: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeEdition :param express_custom_setup_properties: Custom setup without script properties for a SSIS integration runtime. - :type express_custom_setup_properties: - list[~data_factory_management_client.models.CustomSetupBase] + :type express_custom_setup_properties: list[~azure.mgmt.datafactory.models.CustomSetupBase] :param package_stores: Package stores for the SSIS Integration Runtime. - :type package_stores: list[~data_factory_management_client.models.PackageStore] + :type package_stores: list[~azure.mgmt.datafactory.models.PackageStore] + :param credential: The credential reference containing authentication information. + :type credential: ~azure.mgmt.datafactory.models.CredentialReference """ _attribute_map = { @@ -22042,6 +22817,7 @@ class IntegrationRuntimeSsisProperties(msrest.serialization.Model): 'edition': {'key': 'edition', 'type': 'str'}, 'express_custom_setup_properties': {'key': 'expressCustomSetupProperties', 'type': '[CustomSetupBase]'}, 'package_stores': {'key': 'packageStores', 'type': '[PackageStore]'}, + 'credential': {'key': 'credential', 'type': 'CredentialReference'}, } def __init__( @@ -22055,6 +22831,7 @@ def __init__( edition: Optional[Union[str, "IntegrationRuntimeEdition"]] = None, express_custom_setup_properties: Optional[List["CustomSetupBase"]] = None, package_stores: Optional[List["PackageStore"]] = None, + credential: Optional["CredentialReference"] = None, **kwargs ): super(IntegrationRuntimeSsisProperties, self).__init__(**kwargs) @@ -22066,6 +22843,7 @@ def __init__( self.edition = edition self.express_custom_setup_properties = express_custom_setup_properties self.package_stores = package_stores + self.credential = credential class IntegrationRuntimeStatus(msrest.serialization.Model): @@ -22083,13 +22861,13 @@ class IntegrationRuntimeStatus(msrest.serialization.Model): :type additional_properties: dict[str, object] :param type: Required. Type of integration runtime.Constant filled by server. Possible values include: "Managed", "SelfHosted". - :type type: str or ~data_factory_management_client.models.IntegrationRuntimeType + :type type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeType :ivar data_factory_name: The data factory name which the integration runtime belong to. :vartype data_factory_name: str :ivar state: The state of integration runtime. Possible values include: "Initial", "Stopped", "Started", "Starting", "Stopping", "NeedRegistration", "Online", "Limited", "Offline", "AccessDenied". 
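
# A hedged sketch of the new `credential` hook on
# IntegrationRuntimeSsisProperties. CredentialReference is defined elsewhere
# in this models file; the reference_name keyword is assumed here from the
# SDK's usual *Reference conventions, and the name is a placeholder.
from azure.mgmt.datafactory.models import (
    CredentialReference,
    IntegrationRuntimeSsisProperties,
)

ssis_properties = IntegrationRuntimeSsisProperties(
    license_type="LicenseIncluded",
    credential=CredentialReference(reference_name="myUserAssignedCredential"),
)
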
- :vartype state: str or ~data_factory_management_client.models.IntegrationRuntimeState + :vartype state: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeState """ _validation = { @@ -22128,7 +22906,7 @@ class IntegrationRuntimeStatusListResponse(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. :param value: Required. List of integration runtime status. - :type value: list[~data_factory_management_client.models.IntegrationRuntimeStatusResponse] + :type value: list[~azure.mgmt.datafactory.models.IntegrationRuntimeStatusResponse] :param next_link: The link to the next page of results, if any remaining results exist. :type next_link: str """ @@ -22164,7 +22942,7 @@ class IntegrationRuntimeStatusResponse(msrest.serialization.Model): :ivar name: The integration runtime name. :vartype name: str :param properties: Required. Integration runtime properties. - :type properties: ~data_factory_management_client.models.IntegrationRuntimeStatus + :type properties: ~azure.mgmt.datafactory.models.IntegrationRuntimeStatus """ _validation = { @@ -22201,6 +22979,9 @@ class IntegrationRuntimeVNetProperties(msrest.serialization.Model): :param public_i_ps: Resource IDs of the public IP addresses that this integration runtime will use. :type public_i_ps: list[str] + :param subnet_id: The ID of subnet, to which this Azure-SSIS integration runtime will be + joined. + :type subnet_id: str """ _attribute_map = { @@ -22208,6 +22989,7 @@ class IntegrationRuntimeVNetProperties(msrest.serialization.Model): 'v_net_id': {'key': 'vNetId', 'type': 'str'}, 'subnet': {'key': 'subnet', 'type': 'str'}, 'public_i_ps': {'key': 'publicIPs', 'type': '[str]'}, + 'subnet_id': {'key': 'subnetId', 'type': 'str'}, } def __init__( @@ -22217,6 +22999,7 @@ def __init__( v_net_id: Optional[str] = None, subnet: Optional[str] = None, public_i_ps: Optional[List[str]] = None, + subnet_id: Optional[str] = None, **kwargs ): super(IntegrationRuntimeVNetProperties, self).__init__(**kwargs) @@ -22224,6 +23007,7 @@ def __init__( self.v_net_id = v_net_id self.subnet = subnet self.public_i_ps = public_i_ps + self.subnet_id = subnet_id class JiraLinkedService(LinkedService): @@ -22237,11 +23021,11 @@ class JiraLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param host: Required. The IP address or host name of the Jira service. (e.g. @@ -22254,7 +23038,7 @@ class JiraLinkedService(LinkedService): :type username: object :param password: The password corresponding to the user name that you provided in the username field. - :type password: ~data_factory_management_client.models.SecretBase + :type password: ~azure.mgmt.datafactory.models.SecretBase :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. 
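
# A minimal sketch of the new subnet_id alternative on
# IntegrationRuntimeVNetProperties: the Azure-SSIS IR can now be joined to a
# subnet by full resource ID instead of the existing vNetId/subnet name pair.
# The resource ID below is a placeholder.
from azure.mgmt.datafactory.models import IntegrationRuntimeVNetProperties

vnet_properties = IntegrationRuntimeVNetProperties(
    subnet_id=(
        "/subscriptions/<sub-id>/resourceGroups/<rg>/providers/"
        "Microsoft.Network/virtualNetworks/<vnet-name>/subnets/<subnet-name>"
    ),
)
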
:type use_encrypted_endpoints: object @@ -22343,14 +23127,14 @@ class JiraObjectDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The table name. Type: string (or Expression with resultType string). :type table_name: object """ @@ -22411,12 +23195,15 @@ class JiraSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
:type query: object @@ -22432,8 +23219,9 @@ class JiraSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -22444,12 +23232,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, query: Optional[object] = None, **kwargs ): - super(JiraSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(JiraSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'JiraSource' # type: str self.query = query @@ -22473,16 +23262,16 @@ class JsonDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param location: The location of the json data storage. - :type location: ~data_factory_management_client.models.DatasetLocation + :type location: ~azure.mgmt.datafactory.models.DatasetLocation :param encoding_name: The code page name of the preferred encoding. If not specified, the default value is UTF-8, unless BOM denotes another Unicode encoding. Refer to the name column of the table in the following link to set supported values: @@ -22490,7 +23279,7 @@ class JsonDataset(Dataset): resultType string). :type encoding_name: object :param compression: The data compression method used for the json dataset. 
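
# A sketch of the two source-level changes repeated across the copy sources in
# this diff: the new disable_metrics_collection flag, and additional_columns
# loosened from list[AdditionalColumns] to a plain object. The query string
# and column entries are illustrative assumptions.
from azure.mgmt.datafactory.models import JiraSource

source = JiraSource(
    query="project = DEMO",
    disable_metrics_collection=True,
    additional_columns=[
        {"name": "ingestedBy", "value": "@pipeline().DataFactory"}
    ],
)
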
- :type compression: ~data_factory_management_client.models.DatasetCompression + :type compression: ~azure.mgmt.datafactory.models.DatasetCompression """ _validation = { @@ -22551,9 +23340,8 @@ class JsonFormat(DatasetStorageFormat): :param deserializer: Deserializer. Type: string (or Expression with resultType string). :type deserializer: object :param file_pattern: File pattern of JSON. To be more specific, the way of separating a - collection of JSON objects. The default value is 'setOfObjects'. It is case-sensitive. Possible - values include: "setOfObjects", "arrayOfObjects". - :type file_pattern: str or ~data_factory_management_client.models.JsonFormatFilePattern + collection of JSON objects. The default value is 'setOfObjects'. It is case-sensitive. + :type file_pattern: object :param nesting_separator: The character used to separate nesting levels. Default value is '.' (dot). Type: string (or Expression with resultType string). :type nesting_separator: object @@ -22583,7 +23371,7 @@ class JsonFormat(DatasetStorageFormat): 'type': {'key': 'type', 'type': 'str'}, 'serializer': {'key': 'serializer', 'type': 'object'}, 'deserializer': {'key': 'deserializer', 'type': 'object'}, - 'file_pattern': {'key': 'filePattern', 'type': 'str'}, + 'file_pattern': {'key': 'filePattern', 'type': 'object'}, 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, 'encoding_name': {'key': 'encodingName', 'type': 'object'}, 'json_node_reference': {'key': 'jsonNodeReference', 'type': 'object'}, @@ -22596,7 +23384,7 @@ def __init__( additional_properties: Optional[Dict[str, object]] = None, serializer: Optional[object] = None, deserializer: Optional[object] = None, - file_pattern: Optional[Union[str, "JsonFormatFilePattern"]] = None, + file_pattern: Optional[object] = None, nesting_separator: Optional[object] = None, encoding_name: Optional[object] = None, json_node_reference: Optional[object] = None, @@ -22623,7 +23411,7 @@ class JsonReadSettings(FormatReadSettings): :param type: Required. The read setting type.Constant filled by server. :type type: str :param compression_properties: Compression settings. - :type compression_properties: ~data_factory_management_client.models.CompressionReadSettings + :type compression_properties: ~azure.mgmt.datafactory.models.CompressionReadSettings """ _validation = { @@ -22673,10 +23461,13 @@ class JsonSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param store_settings: Json store settings. - :type store_settings: ~data_factory_management_client.models.StoreWriteSettings + :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings :param format_settings: Json format settings. 
- :type format_settings: ~data_factory_management_client.models.JsonWriteSettings + :type format_settings: ~azure.mgmt.datafactory.models.JsonWriteSettings """ _validation = { @@ -22691,6 +23482,7 @@ class JsonSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, 'format_settings': {'key': 'formatSettings', 'type': 'JsonWriteSettings'}, } @@ -22704,11 +23496,12 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, store_settings: Optional["StoreWriteSettings"] = None, format_settings: Optional["JsonWriteSettings"] = None, **kwargs ): - super(JsonSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(JsonSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'JsonSink' # type: str self.store_settings = store_settings self.format_settings = format_settings @@ -22733,13 +23526,16 @@ class JsonSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param store_settings: Json store settings. - :type store_settings: ~data_factory_management_client.models.StoreReadSettings + :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings :param format_settings: Json format settings. - :type format_settings: ~data_factory_management_client.models.JsonReadSettings + :type format_settings: ~azure.mgmt.datafactory.models.JsonReadSettings :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
+ :type additional_columns: object """ _validation = { @@ -22752,9 +23548,10 @@ class JsonSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, 'format_settings': {'key': 'formatSettings', 'type': 'JsonReadSettings'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -22764,12 +23561,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, store_settings: Optional["StoreReadSettings"] = None, format_settings: Optional["JsonReadSettings"] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, **kwargs ): - super(JsonSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(JsonSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'JsonSource' # type: str self.store_settings = store_settings self.format_settings = format_settings @@ -22787,9 +23585,8 @@ class JsonWriteSettings(FormatWriteSettings): :param type: Required. The write setting type.Constant filled by server. :type type: str :param file_pattern: File pattern of JSON. This setting controls the way a collection of JSON - objects will be treated. The default value is 'setOfObjects'. It is case-sensitive. Possible - values include: "setOfObjects", "arrayOfObjects". - :type file_pattern: str or ~data_factory_management_client.models.JsonWriteFilePattern + objects will be treated. The default value is 'setOfObjects'. It is case-sensitive. + :type file_pattern: object """ _validation = { @@ -22799,14 +23596,14 @@ class JsonWriteSettings(FormatWriteSettings): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'file_pattern': {'key': 'filePattern', 'type': 'str'}, + 'file_pattern': {'key': 'filePattern', 'type': 'object'}, } def __init__( self, *, additional_properties: Optional[Dict[str, object]] = None, - file_pattern: Optional[Union[str, "JsonWriteFilePattern"]] = None, + file_pattern: Optional[object] = None, **kwargs ): super(JsonWriteSettings, self).__init__(additional_properties=additional_properties, **kwargs) @@ -22903,7 +23700,7 @@ class LinkedIntegrationRuntimeKeyAuthorization(LinkedIntegrationRuntimeType): sharing.Constant filled by server. :type authorization_type: str :param key: Required. The key used for authorization. - :type key: ~data_factory_management_client.models.SecureString + :type key: ~azure.mgmt.datafactory.models.SecureString """ _validation = { @@ -22995,7 +23792,7 @@ class LinkedServiceDebugResource(SubResourceDebugResource): :param name: The resource name. 
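
# file_pattern on JsonWriteSettings (and JsonFormat) is now a plain object
# rather than the removed enum, so it can carry either of the previously
# enumerated strings or an ADF expression; "arrayOfObjects" below is one of
# the two values named in the old enum docstring.
from azure.mgmt.datafactory.models import JsonWriteSettings

format_settings = JsonWriteSettings(file_pattern="arrayOfObjects")
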
:type name: str :param properties: Required. Properties of linked service. - :type properties: ~data_factory_management_client.models.LinkedService + :type properties: ~azure.mgmt.datafactory.models.LinkedService """ _validation = { @@ -23024,7 +23821,7 @@ class LinkedServiceListResponse(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. :param value: Required. List of linked services. - :type value: list[~data_factory_management_client.models.LinkedServiceResource] + :type value: list[~azure.mgmt.datafactory.models.LinkedServiceResource] :param next_link: The link to the next page of results, if any remaining results exist. :type next_link: str """ @@ -23106,7 +23903,7 @@ class LinkedServiceResource(SubResource): :ivar etag: Etag identifies change in the resource. :vartype etag: str :param properties: Required. Properties of linked service. - :type properties: ~data_factory_management_client.models.LinkedService + :type properties: ~azure.mgmt.datafactory.models.LinkedService """ _validation = { @@ -23141,7 +23938,7 @@ class LogLocationSettings(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. :param linked_service_name: Required. Log storage linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param path: The path to storage for storing detailed logs of activity execution. Type: string (or Expression with resultType string). :type path: object @@ -23177,11 +23974,10 @@ class LogSettings(msrest.serialization.Model): (or Expression with resultType boolean). :type enable_copy_activity_log: object :param copy_activity_log_settings: Specifies settings for copy activity log. - :type copy_activity_log_settings: - ~data_factory_management_client.models.CopyActivityLogSettings + :type copy_activity_log_settings: ~azure.mgmt.datafactory.models.CopyActivityLogSettings :param log_location_settings: Required. Log location settings customer needs to provide when enabling log. - :type log_location_settings: ~data_factory_management_client.models.LogLocationSettings + :type log_location_settings: ~azure.mgmt.datafactory.models.LogLocationSettings """ _validation = { @@ -23217,7 +24013,7 @@ class LogStorageSettings(msrest.serialization.Model): collection. :type additional_properties: dict[str, object] :param linked_service_name: Required. Log storage linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param path: The path to storage for storing detailed logs of activity execution. Type: string (or Expression with resultType string). :type path: object @@ -23274,17 +24070,17 @@ class LookupActivity(ExecutionActivity): :param description: Activity description. :type description: str :param depends_on: Activity depends on condition. - :type depends_on: list[~data_factory_management_client.models.ActivityDependency] + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. - :type user_properties: list[~data_factory_management_client.models.UserProperty] + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] :param linked_service_name: Linked service reference. 
- :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param policy: Activity policy. - :type policy: ~data_factory_management_client.models.ActivityPolicy + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy :param source: Required. Dataset-specific source properties, same as copy activity source. - :type source: ~data_factory_management_client.models.CopySource + :type source: ~azure.mgmt.datafactory.models.CopySource :param dataset: Required. Lookup activity dataset reference. - :type dataset: ~data_factory_management_client.models.DatasetReference + :type dataset: ~azure.mgmt.datafactory.models.DatasetReference :param first_row_only: Whether to return first row or all rows. Default value is true. Type: boolean (or Expression with resultType boolean). :type first_row_only: object @@ -23344,17 +24140,17 @@ class MagentoLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param host: Required. The URL of the Magento instance. (i.e. 192.168.222.110/magento3). :type host: object :param access_token: The access token from Magento. - :type access_token: ~data_factory_management_client.models.SecretBase + :type access_token: ~azure.mgmt.datafactory.models.SecretBase :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. :type use_encrypted_endpoints: object @@ -23436,14 +24232,14 @@ class MagentoObjectDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The table name. Type: string (or Expression with resultType string). :type table_name: object """ @@ -23504,12 +24300,15 @@ class MagentoSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). 
:type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -23525,8 +24324,9 @@ class MagentoSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -23537,16 +24337,61 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, query: Optional[object] = None, **kwargs ): - super(MagentoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(MagentoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'MagentoSource' # type: str self.query = query +class ManagedIdentityCredential(Credential): + """Managed identity credential. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of credential.Constant filled by server. + :type type: str + :param description: Credential description. + :type description: str + :param annotations: List of tags that can be used for describing the Credential. + :type annotations: list[object] + :param resource_id: The resource id of user assigned managed identity. 
+ :type resource_id: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'resource_id': {'key': 'typeProperties.resourceId', 'type': 'str'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + annotations: Optional[List[object]] = None, + resource_id: Optional[str] = None, + **kwargs + ): + super(ManagedIdentityCredential, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, **kwargs) + self.type = 'ManagedIdentity' # type: str + self.resource_id = resource_id + + class ManagedIntegrationRuntime(IntegrationRuntime): """Managed integration runtime, including managed elastic and managed dedicated integration runtimes. @@ -23559,21 +24404,19 @@ class ManagedIntegrationRuntime(IntegrationRuntime): :type additional_properties: dict[str, object] :param type: Required. Type of integration runtime.Constant filled by server. Possible values include: "Managed", "SelfHosted". - :type type: str or ~data_factory_management_client.models.IntegrationRuntimeType + :type type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeType :param description: Integration runtime description. :type description: str :ivar state: Integration runtime state, only valid for managed dedicated integration runtime. Possible values include: "Initial", "Stopped", "Started", "Starting", "Stopping", "NeedRegistration", "Online", "Limited", "Offline", "AccessDenied". - :vartype state: str or ~data_factory_management_client.models.IntegrationRuntimeState + :vartype state: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeState :param managed_virtual_network: Managed Virtual Network reference. - :type managed_virtual_network: - ~data_factory_management_client.models.ManagedVirtualNetworkReference + :type managed_virtual_network: ~azure.mgmt.datafactory.models.ManagedVirtualNetworkReference :param compute_properties: The compute resource for managed integration runtime. - :type compute_properties: - ~data_factory_management_client.models.IntegrationRuntimeComputeProperties + :type compute_properties: ~azure.mgmt.datafactory.models.IntegrationRuntimeComputeProperties :param ssis_properties: SSIS properties for managed integration runtime. - :type ssis_properties: ~data_factory_management_client.models.IntegrationRuntimeSsisProperties + :type ssis_properties: ~azure.mgmt.datafactory.models.IntegrationRuntimeSsisProperties """ _validation = { @@ -23668,10 +24511,9 @@ class ManagedIntegrationRuntimeNode(msrest.serialization.Model): :vartype node_id: str :ivar status: The managed integration runtime node status. Possible values include: "Starting", "Available", "Recycling", "Unavailable". - :vartype status: str or - ~data_factory_management_client.models.ManagedIntegrationRuntimeNodeStatus + :vartype status: str or ~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeNodeStatus :param errors: The errors that occurred on this integration runtime node. 
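
# A minimal sketch constructing the new ManagedIdentityCredential; the
# resource ID is a placeholder. The type discriminator is filled in as
# 'ManagedIdentity' by the __init__ shown above.
from azure.mgmt.datafactory.models import ManagedIdentityCredential

credential = ManagedIdentityCredential(
    description="User-assigned identity for the SSIS integration runtime",
    resource_id=(
        "/subscriptions/<sub-id>/resourceGroups/<rg>/providers/"
        "Microsoft.ManagedIdentity/userAssignedIdentities/<identity-name>"
    ),
)
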
- :type errors: list[~data_factory_management_client.models.ManagedIntegrationRuntimeError] + :type errors: list[~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeError] """ _validation = { @@ -23769,23 +24611,22 @@ class ManagedIntegrationRuntimeStatus(IntegrationRuntimeStatus): :type additional_properties: dict[str, object] :param type: Required. Type of integration runtime.Constant filled by server. Possible values include: "Managed", "SelfHosted". - :type type: str or ~data_factory_management_client.models.IntegrationRuntimeType + :type type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeType :ivar data_factory_name: The data factory name which the integration runtime belong to. :vartype data_factory_name: str :ivar state: The state of integration runtime. Possible values include: "Initial", "Stopped", "Started", "Starting", "Stopping", "NeedRegistration", "Online", "Limited", "Offline", "AccessDenied". - :vartype state: str or ~data_factory_management_client.models.IntegrationRuntimeState + :vartype state: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeState :ivar create_time: The time at which the integration runtime was created, in ISO8601 format. :vartype create_time: ~datetime.datetime :ivar nodes: The list of nodes for managed integration runtime. - :vartype nodes: list[~data_factory_management_client.models.ManagedIntegrationRuntimeNode] + :vartype nodes: list[~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeNode] :ivar other_errors: The errors that occurred on this integration runtime. - :vartype other_errors: - list[~data_factory_management_client.models.ManagedIntegrationRuntimeError] + :vartype other_errors: list[~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeError] :ivar last_operation: The last operation result that occurred on this integration runtime. :vartype last_operation: - ~data_factory_management_client.models.ManagedIntegrationRuntimeOperationResult + ~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeOperationResult """ _validation = { @@ -23832,7 +24673,7 @@ class ManagedPrivateEndpoint(msrest.serialization.Model): collection. :type additional_properties: dict[str, object] :param connection_state: The managed private endpoint connection state. - :type connection_state: ~data_factory_management_client.models.ConnectionStateProperties + :type connection_state: ~azure.mgmt.datafactory.models.ConnectionStateProperties :param fqdns: Fully qualified domain names. :type fqdns: list[str] :param group_id: The groupId to which the managed private endpoint is created. @@ -23887,7 +24728,7 @@ class ManagedPrivateEndpointListResponse(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. :param value: Required. List of managed private endpoints. - :type value: list[~data_factory_management_client.models.ManagedPrivateEndpointResource] + :type value: list[~azure.mgmt.datafactory.models.ManagedPrivateEndpointResource] :param next_link: The link to the next page of results, if any remaining results exist. :type next_link: str """ @@ -23929,7 +24770,7 @@ class ManagedPrivateEndpointResource(SubResource): :ivar etag: Etag identifies change in the resource. :vartype etag: str :param properties: Required. Managed private endpoint properties. 
- :type properties: ~data_factory_management_client.models.ManagedPrivateEndpoint + :type properties: ~azure.mgmt.datafactory.models.ManagedPrivateEndpoint """ _validation = { @@ -24001,7 +24842,7 @@ class ManagedVirtualNetworkListResponse(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. :param value: Required. List of managed Virtual Networks. - :type value: list[~data_factory_management_client.models.ManagedVirtualNetworkResource] + :type value: list[~azure.mgmt.datafactory.models.ManagedVirtualNetworkResource] :param next_link: The link to the next page of results, if any remaining results exist. :type next_link: str """ @@ -24079,7 +24920,7 @@ class ManagedVirtualNetworkResource(SubResource): :ivar etag: Etag identifies change in the resource. :vartype etag: str :param properties: Required. Managed Virtual Network properties. - :type properties: ~data_factory_management_client.models.ManagedVirtualNetwork + :type properties: ~azure.mgmt.datafactory.models.ManagedVirtualNetwork """ _validation = { @@ -24111,7 +24952,9 @@ def __init__( class MappingDataFlow(DataFlow): """Mapping data flow. - :param type: Type of data flow.Constant filled by server. + All required parameters must be populated in order to send to Azure. + + :param type: Required. Type of data flow.Constant filled by server. :type type: str :param description: The description of the data flow. :type description: str @@ -24119,17 +24962,21 @@ class MappingDataFlow(DataFlow): :type annotations: list[object] :param folder: The folder that this data flow is in. If not specified, Data flow will appear at the root level. - :type folder: ~data_factory_management_client.models.DataFlowFolder + :type folder: ~azure.mgmt.datafactory.models.DataFlowFolder :param sources: List of sources in data flow. - :type sources: list[~data_factory_management_client.models.DataFlowSource] + :type sources: list[~azure.mgmt.datafactory.models.DataFlowSource] :param sinks: List of sinks in data flow. - :type sinks: list[~data_factory_management_client.models.DataFlowSink] + :type sinks: list[~azure.mgmt.datafactory.models.DataFlowSink] :param transformations: List of transformations in data flow. - :type transformations: list[~data_factory_management_client.models.Transformation] + :type transformations: list[~azure.mgmt.datafactory.models.Transformation] :param script: DataFlow script. :type script: str """ + _validation = { + 'type': {'required': True}, + } + _attribute_map = { 'type': {'key': 'type', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, @@ -24172,18 +25019,18 @@ class MariaDbLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param connection_string: An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. 
:type connection_string: object :param pwd: The Azure key vault secret reference of password in connection string. - :type pwd: ~data_factory_management_client.models.AzureKeyVaultSecretReference + :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -24245,12 +25092,15 @@ class MariaDbSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -24266,8 +25116,9 @@ class MariaDbSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -24278,12 +25129,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, query: Optional[object] = None, **kwargs ): - super(MariaDbSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(MariaDbSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'MariaDBSource' # type: str self.query = query @@ -24307,14 +25159,14 @@ class MariaDbTableDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. 
:type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The table name. Type: string (or Expression with resultType string). :type table_name: object """ @@ -24367,11 +25219,11 @@ class MarketoLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param endpoint: Required. The endpoint of the Marketo server. (i.e. 123-ABC-321.mktorest.com). @@ -24379,7 +25231,7 @@ class MarketoLinkedService(LinkedService): :param client_id: Required. The client Id of your Marketo service. :type client_id: object :param client_secret: The client secret of your Marketo service. - :type client_secret: ~data_factory_management_client.models.SecretBase + :type client_secret: ~azure.mgmt.datafactory.models.SecretBase :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. :type use_encrypted_endpoints: object @@ -24465,14 +25317,14 @@ class MarketoObjectDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The table name. Type: string (or Expression with resultType string). 
:type table_name: object """ @@ -24533,12 +25385,15 @@ class MarketoSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -24554,8 +25409,9 @@ class MarketoSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -24566,16 +25422,43 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, query: Optional[object] = None, **kwargs ): - super(MarketoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(MarketoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'MarketoSource' # type: str self.query = query +class MetadataItem(msrest.serialization.Model): + """Specify the name and value of custom metadata item. + + :param name: Metadata item key name. Type: string (or Expression with resultType string). + :type name: object + :param value: Metadata item value. Type: string (or Expression with resultType string). 
+ :type value: object + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'object'}, + 'value': {'key': 'value', 'type': 'object'}, + } + + def __init__( + self, + *, + name: Optional[object] = None, + value: Optional[object] = None, + **kwargs + ): + super(MetadataItem, self).__init__(**kwargs) + self.name = name + self.value = value + + class MicrosoftAccessLinkedService(LinkedService): """Microsoft Access linked service. @@ -24587,11 +25470,11 @@ class MicrosoftAccessLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param connection_string: Required. The non-access credential portion of the connection string @@ -24604,12 +25487,12 @@ class MicrosoftAccessLinkedService(LinkedService): :type authentication_type: object :param credential: The access credential portion of the connection string specified in driver- specific property-value format. - :type credential: ~data_factory_management_client.models.SecretBase + :type credential: ~azure.mgmt.datafactory.models.SecretBase :param user_name: User name for Basic authentication. Type: string (or Expression with resultType string). :type user_name: object :param password: Password for Basic authentication. - :type password: ~data_factory_management_client.models.SecretBase + :type password: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -24687,6 +25570,9 @@ class MicrosoftAccessSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param pre_copy_script: A query to execute before starting the copy. Type: string (or Expression with resultType string). 
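
# A minimal sketch of the new MetadataItem model: name and value are typed as
# object, so both literals and ADF expressions are accepted. The key name and
# expression below are illustrative.
from azure.mgmt.datafactory.models import MetadataItem

item = MetadataItem(name="origin", value="@pipeline().Pipeline")
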
:type pre_copy_script: object @@ -24704,6 +25590,7 @@ class MicrosoftAccessSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } @@ -24716,10 +25603,11 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, pre_copy_script: Optional[object] = None, **kwargs ): - super(MicrosoftAccessSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(MicrosoftAccessSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'MicrosoftAccessSink' # type: str self.pre_copy_script = pre_copy_script @@ -24743,11 +25631,14 @@ class MicrosoftAccessSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query: Database query. Type: string (or Expression with resultType string). :type query: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
+ :type additional_columns: object """ _validation = { @@ -24760,8 +25651,9 @@ class MicrosoftAccessSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -24771,11 +25663,12 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, **kwargs ): - super(MicrosoftAccessSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(MicrosoftAccessSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'MicrosoftAccessSource' # type: str self.query = query self.additional_columns = additional_columns @@ -24800,14 +25693,14 @@ class MicrosoftAccessTableDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The Microsoft Access table name. Type: string (or Expression with resultType string). :type table_name: object @@ -24869,14 +25762,14 @@ class MongoDbAtlasCollectionDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. 
If not specified, Dataset will appear at the root level.
-    :type folder: ~data_factory_management_client.models.DatasetFolder
+    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
    :param collection: Required. The collection name of the MongoDB Atlas database. Type: string
     (or Expression with resultType string).
    :type collection: object
@@ -24931,11 +25824,11 @@ class MongoDbAtlasLinkedService(LinkedService):
    :param type: Required. Type of linked service.Constant filled by server.
    :type type: str
    :param connect_via: The integration runtime reference.
-    :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference
+    :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
    :param description: Linked service description.
    :type description: str
    :param parameters: Parameters for linked service.
-    :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification]
+    :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the linked service.
    :type annotations: list[object]
    :param connection_string: Required. The MongoDB Atlas connection string. Type: string,
@@ -24982,6 +25875,74 @@ def __init__(
        self.database = database
+
+
+class MongoDbAtlasSink(CopySink):
+    """A copy activity MongoDB Atlas sink.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are deserialized to this
+     collection.
+    :type additional_properties: dict[str, object]
+    :param type: Required. Copy sink type.Constant filled by server.
+    :type type: str
+    :param write_batch_size: Write batch size. Type: integer (or Expression with resultType
+     integer), minimum: 0.
+    :type write_batch_size: object
+    :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType
+     string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type write_batch_timeout: object
+    :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType
+     integer).
+    :type sink_retry_count: object
+    :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string),
+     pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type sink_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection count for the sink data
+     store. Type: integer (or Expression with resultType integer).
+    :type max_concurrent_connections: object
+    :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+     false. Type: boolean (or Expression with resultType boolean).
+    :type disable_metrics_collection: object
+    :param write_behavior: Specifies whether the document with the same key should be overwritten
+     (upsert) rather than raise an exception (insert). The default value is "insert". Type: string
+     (or Expression with resultType string).
+    :type write_behavior: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'type': {'key': 'type', 'type': 'str'},
+        'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
+        'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
+        'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
+        'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
+        'write_behavior': {'key': 'writeBehavior', 'type': 'object'},
+    }
+
+    def __init__(
+        self,
+        *,
+        additional_properties: Optional[Dict[str, object]] = None,
+        write_batch_size: Optional[object] = None,
+        write_batch_timeout: Optional[object] = None,
+        sink_retry_count: Optional[object] = None,
+        sink_retry_wait: Optional[object] = None,
+        max_concurrent_connections: Optional[object] = None,
+        disable_metrics_collection: Optional[object] = None,
+        write_behavior: Optional[object] = None,
+        **kwargs
+    ):
+        super(MongoDbAtlasSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
+        self.type = 'MongoDbAtlasSink'  # type: str
+        self.write_behavior = write_behavior
+
+
class MongoDbAtlasSource(CopySource):
    """A copy activity source for a MongoDB Atlas database.

@@ -25001,12 +25962,15 @@ class MongoDbAtlasSource(CopySource):
    :param max_concurrent_connections: The maximum concurrent connection count for the source data
     store. Type: integer (or Expression with resultType integer).
    :type max_concurrent_connections: object
+    :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+     false. Type: boolean (or Expression with resultType boolean).
+    :type disable_metrics_collection: object
    :param filter: Specifies selection filter using query operators. To return all documents in a
     collection, omit this parameter or pass an empty document ({}). Type: string (or Expression
     with resultType string).
    :type filter: object
    :param cursor_methods: Cursor methods for Mongodb query.
-    :type cursor_methods: ~data_factory_management_client.models.MongoDbCursorMethodsProperties
+    :type cursor_methods: ~azure.mgmt.datafactory.models.MongoDbCursorMethodsProperties
    :param batch_size: Specifies the number of documents to return in each batch of the response
     from MongoDB Atlas instance. In most cases, modifying the batch size will not affect the user
     or the application. This property's main purpose is to avoid hit the limitation of response
@@ -25016,8 +25980,8 @@
     pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type query_timeout: object
    :param additional_columns: Specifies the additional columns to be added to source data. Type:
-     array of objects (or Expression with resultType array of objects).
-    :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+     array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object """ _validation = { @@ -25030,11 +25994,12 @@ class MongoDbAtlasSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'filter': {'key': 'filter', 'type': 'object'}, 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'}, 'batch_size': {'key': 'batchSize', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -25044,14 +26009,15 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, filter: Optional[object] = None, cursor_methods: Optional["MongoDbCursorMethodsProperties"] = None, batch_size: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, **kwargs ): - super(MongoDbAtlasSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(MongoDbAtlasSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'MongoDbAtlasSource' # type: str self.filter = filter self.cursor_methods = cursor_methods @@ -25079,14 +26045,14 @@ class MongoDbCollectionDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param collection_name: Required. The table name of the MongoDB database. Type: string (or Expression with resultType string). :type collection_name: object @@ -25190,11 +26156,11 @@ class MongoDbLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. 
:type description: str
    :param parameters: Parameters for linked service.
-    :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification]
+    :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the linked service.
    :type annotations: list[object]
    :param server: Required. The IP address or server name of the MongoDB server. Type: string (or
@@ -25202,8 +26168,7 @@ class MongoDbLinkedService(LinkedService):
    :type server: object
    :param authentication_type: The authentication type to be used to connect to the MongoDB
     database. Possible values include: "Basic", "Anonymous".
-    :type authentication_type: str or
-     ~data_factory_management_client.models.MongoDbAuthenticationType
+    :type authentication_type: str or ~azure.mgmt.datafactory.models.MongoDbAuthenticationType
    :param database_name: Required. The name of the MongoDB database that you want to access.
     Type: string (or Expression with resultType string).
    :type database_name: object
@@ -25211,7 +26176,7 @@
     string).
    :type username: object
    :param password: Password for authentication.
-    :type password: ~data_factory_management_client.models.SecretBase
+    :type password: ~azure.mgmt.datafactory.models.SecretBase
    :param auth_source: Database to verify the username and password. Type: string (or Expression
     with resultType string).
    :type auth_source: object
@@ -25308,12 +26273,15 @@ class MongoDbSource(CopySource):
    :param max_concurrent_connections: The maximum concurrent connection count for the source data
     store. Type: integer (or Expression with resultType integer).
    :type max_concurrent_connections: object
+    :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+     false. Type: boolean (or Expression with resultType boolean).
+    :type disable_metrics_collection: object
    :param query: Database query. Should be a SQL-92 query expression. Type: string (or Expression
     with resultType string).
    :type query: object
    :param additional_columns: Specifies the additional columns to be added to source data. Type:
-     array of objects (or Expression with resultType array of objects).
-    :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+     array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object """ _validation = { @@ -25326,8 +26294,9 @@ class MongoDbSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -25337,11 +26306,12 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, **kwargs ): - super(MongoDbSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(MongoDbSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'MongoDbSource' # type: str self.query = query self.additional_columns = additional_columns @@ -25366,14 +26336,14 @@ class MongoDbV2CollectionDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param collection: Required. The collection name of the MongoDB database. Type: string (or Expression with resultType string). :type collection: object @@ -25428,11 +26398,11 @@ class MongoDbV2LinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. 
:type annotations: list[object]
    :param connection_string: Required. The MongoDB connection string. Type: string, SecureString
@@ -25478,6 +26448,74 @@ def __init__(
        self.database = database
+
+
+class MongoDbV2Sink(CopySink):
+    """A copy activity MongoDB sink.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are deserialized to this
+     collection.
+    :type additional_properties: dict[str, object]
+    :param type: Required. Copy sink type.Constant filled by server.
+    :type type: str
+    :param write_batch_size: Write batch size. Type: integer (or Expression with resultType
+     integer), minimum: 0.
+    :type write_batch_size: object
+    :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType
+     string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type write_batch_timeout: object
+    :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType
+     integer).
+    :type sink_retry_count: object
+    :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string),
+     pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type sink_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection count for the sink data
+     store. Type: integer (or Expression with resultType integer).
+    :type max_concurrent_connections: object
+    :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+     false. Type: boolean (or Expression with resultType boolean).
+    :type disable_metrics_collection: object
+    :param write_behavior: Specifies whether the document with the same key should be overwritten
+     (upsert) rather than raise an exception (insert). The default value is "insert". Type: string
+     (or Expression with resultType string).
+    :type write_behavior: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'type': {'key': 'type', 'type': 'str'},
+        'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
+        'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
+        'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
+        'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
+        'write_behavior': {'key': 'writeBehavior', 'type': 'object'},
+    }
+
+    def __init__(
+        self,
+        *,
+        additional_properties: Optional[Dict[str, object]] = None,
+        write_batch_size: Optional[object] = None,
+        write_batch_timeout: Optional[object] = None,
+        sink_retry_count: Optional[object] = None,
+        sink_retry_wait: Optional[object] = None,
+        max_concurrent_connections: Optional[object] = None,
+        disable_metrics_collection: Optional[object] = None,
+        write_behavior: Optional[object] = None,
+        **kwargs
+    ):
+        super(MongoDbV2Sink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
+        self.type = 'MongoDbV2Sink'  # type: str
+        self.write_behavior = write_behavior
+
+
class MongoDbV2Source(CopySource):
    """A copy activity source for a MongoDB database.

@@ -25497,12 +26535,15 @@ class MongoDbV2Source(CopySource):
    :param max_concurrent_connections: The maximum concurrent connection count for the source data
     store. Type: integer (or Expression with resultType integer).
    :type max_concurrent_connections: object
+    :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+     false. Type: boolean (or Expression with resultType boolean).
+    :type disable_metrics_collection: object
    :param filter: Specifies selection filter using query operators. To return all documents in a
     collection, omit this parameter or pass an empty document ({}). Type: string (or Expression
     with resultType string).
    :type filter: object
    :param cursor_methods: Cursor methods for Mongodb query.
-    :type cursor_methods: ~data_factory_management_client.models.MongoDbCursorMethodsProperties
+    :type cursor_methods: ~azure.mgmt.datafactory.models.MongoDbCursorMethodsProperties
    :param batch_size: Specifies the number of documents to return in each batch of the response
     from MongoDB instance. In most cases, modifying the batch size will not affect the user or the
     application. This property's main purpose is to avoid hit the limitation of response size.
@@ -25512,8 +26553,8 @@
     pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type query_timeout: object
    :param additional_columns: Specifies the additional columns to be added to source data. Type:
-     array of objects (or Expression with resultType array of objects).
-    :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+     array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object """ _validation = { @@ -25526,11 +26567,12 @@ class MongoDbV2Source(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'filter': {'key': 'filter', 'type': 'object'}, 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'}, 'batch_size': {'key': 'batchSize', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -25540,14 +26582,15 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, filter: Optional[object] = None, cursor_methods: Optional["MongoDbCursorMethodsProperties"] = None, batch_size: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, **kwargs ): - super(MongoDbV2Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(MongoDbV2Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'MongoDbV2Source' # type: str self.filter = filter self.cursor_methods = cursor_methods @@ -25567,17 +26610,17 @@ class MySqlLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param connection_string: Required. The connection string. :type connection_string: object :param password: The Azure key vault secret reference of password in connection string. - :type password: ~data_factory_management_client.models.AzureKeyVaultSecretReference + :type password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -25640,12 +26683,15 @@ class MySqlSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). 
:type max_concurrent_connections: object
+    :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+     false. Type: boolean (or Expression with resultType boolean).
+    :type disable_metrics_collection: object
    :param query_timeout: Query timeout. Type: string (or Expression with resultType string),
     pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type query_timeout: object
    :param additional_columns: Specifies the additional columns to be added to source data. Type:
-     array of objects (or Expression with resultType array of objects).
-    :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+     array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+    :type additional_columns: object
    :param query: Database query. Type: string (or Expression with resultType string).
    :type query: object
    """
@@ -25660,8 +26706,9 @@ class MySqlSource(TabularSource):
        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
        'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
-        'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+        'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
        'query': {'key': 'query', 'type': 'object'},
    }

@@ -25672,12 +26719,13 @@ def __init__(
        self,
        *,
        additional_properties: Optional[Dict[str, object]] = None,
        source_retry_count: Optional[object] = None,
        source_retry_wait: Optional[object] = None,
        max_concurrent_connections: Optional[object] = None,
+        disable_metrics_collection: Optional[object] = None,
        query_timeout: Optional[object] = None,
-        additional_columns: Optional[List["AdditionalColumns"]] = None,
+        additional_columns: Optional[object] = None,
        query: Optional[object] = None,
        **kwargs
    ):
-        super(MySqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+        super(MySqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
        self.type = 'MySqlSource'  # type: str
        self.query = query

@@ -25701,14 +26749,14 @@ class MySqlTableDataset(Dataset):
     Expression with resultType array), itemType: DatasetSchemaDataElement.
    :type schema: object
    :param linked_service_name: Required. Linked service reference.
-    :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference
+    :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference
    :param parameters: Parameters for dataset.
-    :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification]
+    :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the Dataset.
    :type annotations: list[object]
    :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at
     the root level.
-    :type folder: ~data_factory_management_client.models.DatasetFolder
+    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
    :param table_name: The MySQL table name. Type: string (or Expression with resultType string).
    :type table_name: object
    """
@@ -25761,18 +26809,18 @@ class NetezzaLinkedService(LinkedService):
    :param type: Required. Type of linked service.Constant filled by server.
    :type type: str
    :param connect_via: The integration runtime reference.
-    :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference
+    :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
    :param description: Linked service description.
    :type description: str
    :param parameters: Parameters for linked service.
-    :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification]
+    :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the linked service.
    :type annotations: list[object]
    :param connection_string: An ODBC connection string. Type: string, SecureString or
     AzureKeyVaultSecretReference.
    :type connection_string: object
    :param pwd: The Azure key vault secret reference of password in connection string.
-    :type pwd: ~data_factory_management_client.models.AzureKeyVaultSecretReference
+    :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference
    :param encrypted_credential: The encrypted credential used for authentication. Credentials are
     encrypted using the integration runtime credential manager. Type: string (or Expression with
     resultType string).
@@ -25870,12 +26918,15 @@ class NetezzaSource(TabularSource):
    :param max_concurrent_connections: The maximum concurrent connection count for the source data
     store. Type: integer (or Expression with resultType integer).
    :type max_concurrent_connections: object
+    :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+     false. Type: boolean (or Expression with resultType boolean).
+    :type disable_metrics_collection: object
    :param query_timeout: Query timeout. Type: string (or Expression with resultType string),
     pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type query_timeout: object
    :param additional_columns: Specifies the additional columns to be added to source data. Type:
-     array of objects (or Expression with resultType array of objects).
-    :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+     array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+    :type additional_columns: object
    :param query: A query to retrieve data from source. Type: string (or Expression with
     resultType string).
    :type query: object
@@ -25883,7 +26934,7 @@
     parallel. Possible values include: "None", "DataSlice", "DynamicRange".
    :type partition_option: object
    :param partition_settings: The settings that will be leveraged for Netezza source
     partitioning.
- :type partition_settings: ~data_factory_management_client.models.NetezzaPartitionSettings + :type partition_settings: ~azure.mgmt.datafactory.models.NetezzaPartitionSettings """ _validation = { @@ -25896,8 +26947,9 @@ class NetezzaSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'partition_option': {'key': 'partitionOption', 'type': 'object'}, 'partition_settings': {'key': 'partitionSettings', 'type': 'NetezzaPartitionSettings'}, @@ -25910,14 +26962,15 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, query: Optional[object] = None, partition_option: Optional[object] = None, partition_settings: Optional["NetezzaPartitionSettings"] = None, **kwargs ): - super(NetezzaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(NetezzaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'NetezzaSource' # type: str self.query = query self.partition_option = partition_option @@ -25943,14 +26996,14 @@ class NetezzaTableDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: This property will be retired. Please consider using schema + table properties instead. :type table_name: object @@ -26016,11 +27069,11 @@ class ODataLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. 
:type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param url: Required. The URL of the OData service endpoint. Type: string (or Expression with @@ -26029,13 +27082,12 @@ class ODataLinkedService(LinkedService): :param authentication_type: Type of authentication used to connect to the OData service. Possible values include: "Basic", "Anonymous", "Windows", "AadServicePrincipal", "ManagedServiceIdentity". - :type authentication_type: str or - ~data_factory_management_client.models.ODataAuthenticationType + :type authentication_type: str or ~azure.mgmt.datafactory.models.ODataAuthenticationType :param user_name: User name of the OData service. Type: string (or Expression with resultType string). :type user_name: object :param password: Password of the OData service. - :type password: ~data_factory_management_client.models.SecretBase + :type password: ~azure.mgmt.datafactory.models.SecretBase :param auth_headers: The additional HTTP headers in the request to RESTful API used for authorization. Type: object (or Expression with resultType object). :type auth_headers: object @@ -26055,19 +27107,18 @@ class ODataLinkedService(LinkedService): :param aad_service_principal_credential_type: Specify the credential type (key or cert) is used for service principal. Possible values include: "ServicePrincipalKey", "ServicePrincipalCert". :type aad_service_principal_credential_type: str or - ~data_factory_management_client.models.ODataAadServicePrincipalCredentialType + ~azure.mgmt.datafactory.models.ODataAadServicePrincipalCredentialType :param service_principal_key: Specify the secret of your application registered in Azure Active Directory. Type: string (or Expression with resultType string). - :type service_principal_key: ~data_factory_management_client.models.SecretBase + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :param service_principal_embedded_cert: Specify the base64 encoded certificate of your application registered in Azure Active Directory. Type: string (or Expression with resultType string). - :type service_principal_embedded_cert: ~data_factory_management_client.models.SecretBase + :type service_principal_embedded_cert: ~azure.mgmt.datafactory.models.SecretBase :param service_principal_embedded_cert_password: Specify the password of your certificate if your certificate has a password and you are using AadServicePrincipal authentication. Type: string (or Expression with resultType string). - :type service_principal_embedded_cert_password: - ~data_factory_management_client.models.SecretBase + :type service_principal_embedded_cert_password: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
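A recurring shape change runs through these hunks: every copy source and sink gains an optional disable_metrics_collection field, and additional_columns is loosened from a typed [AdditionalColumns] list to a plain object. A minimal sketch of what that means for callers, assuming this extension's vendored SDK import path (azext_datafactory.vendored_sdks.datafactory is an assumption; adjust to wherever the generated models live); the query and column values are illustrative only:

    from azext_datafactory.vendored_sdks.datafactory import models

    source = models.ODataSource(
        query="$top=1",                    # sample query taken from the docstring above
        http_request_timeout="00:05:00",   # default noted in the docstring
        disable_metrics_collection=True,   # new optional field; service default is false
        # additional_columns is now an untyped object, so a plain list of
        # name/value mappings can be passed instead of AdditionalColumns instances:
        additional_columns=[{"name": "origin", "value": "odata"}],
    )

Because additional_columns now serializes as a bare object, the client no longer validates the list shape; whatever is passed goes to the service as-is.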
@@ -26163,14 +27214,14 @@ class ODataResourceDataset(Dataset):
     Expression with resultType array), itemType: DatasetSchemaDataElement.
    :type schema: object
    :param linked_service_name: Required. Linked service reference.
-    :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference
+    :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference
    :param parameters: Parameters for dataset.
-    :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification]
+    :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the Dataset.
    :type annotations: list[object]
    :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at
     the root level.
-    :type folder: ~data_factory_management_client.models.DatasetFolder
+    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
    :param path: The OData resource path. Type: string (or Expression with resultType string).
    :type path: object
    """
@@ -26231,6 +27282,9 @@ class ODataSource(CopySource):
    :param max_concurrent_connections: The maximum concurrent connection count for the source data
     store. Type: integer (or Expression with resultType integer).
    :type max_concurrent_connections: object
+    :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+     false. Type: boolean (or Expression with resultType boolean).
+    :type disable_metrics_collection: object
    :param query: OData query. For example, "$top=1". Type: string (or Expression with resultType
     string).
    :type query: object
@@ -26240,8 +27294,8 @@
     ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type http_request_timeout: object
    :param additional_columns: Specifies the additional columns to be added to source data. Type:
-     array of objects (or Expression with resultType array of objects).
-    :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+     array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object """ _validation = { @@ -26254,9 +27308,10 @@ class ODataSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -26266,12 +27321,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query: Optional[object] = None, http_request_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, **kwargs ): - super(ODataSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(ODataSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'ODataSource' # type: str self.query = query self.http_request_timeout = http_request_timeout @@ -26289,11 +27345,11 @@ class OdbcLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param connection_string: Required. The non-access credential portion of the connection string @@ -26305,12 +27361,12 @@ class OdbcLinkedService(LinkedService): :type authentication_type: object :param credential: The access credential portion of the connection string specified in driver- specific property-value format. - :type credential: ~data_factory_management_client.models.SecretBase + :type credential: ~azure.mgmt.datafactory.models.SecretBase :param user_name: User name for Basic authentication. Type: string (or Expression with resultType string). :type user_name: object :param password: Password for Basic authentication. - :type password: ~data_factory_management_client.models.SecretBase + :type password: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
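The new MongoDbAtlasSink and MongoDbV2Sink classes above add only write_behavior on top of the common CopySink fields, so constructing and serializing one is mechanical. A short sketch under the same assumed vendored import path as the previous example; serialize() is the standard msrest model helper, and the printed dict is indicative rather than guaranteed key order:

    from azext_datafactory.vendored_sdks.datafactory import models

    sink = models.MongoDbV2Sink(
        write_behavior="upsert",           # overwrite documents sharing a key instead of raising
        write_batch_size=1000,
        disable_metrics_collection=False,
    )
    # _attribute_map drives the wire names, so this prints something like:
    # {'type': 'MongoDbV2Sink', 'writeBatchSize': 1000,
    #  'disableMetricsCollection': False, 'writeBehavior': 'upsert'}
    print(sink.serialize())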
@@ -26388,6 +27444,9 @@ class OdbcSink(CopySink):
    :param max_concurrent_connections: The maximum concurrent connection count for the sink data
     store. Type: integer (or Expression with resultType integer).
    :type max_concurrent_connections: object
+    :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+     false. Type: boolean (or Expression with resultType boolean).
+    :type disable_metrics_collection: object
    :param pre_copy_script: A query to execute before starting the copy. Type: string (or
     Expression with resultType string).
    :type pre_copy_script: object
@@ -26405,6 +27464,7 @@
        'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
        'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
        'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
    }

@@ -26417,10 +27477,11 @@ def __init__(
        self,
        *,
        additional_properties: Optional[Dict[str, object]] = None,
        write_batch_size: Optional[object] = None,
        write_batch_timeout: Optional[object] = None,
        sink_retry_count: Optional[object] = None,
        sink_retry_wait: Optional[object] = None,
        max_concurrent_connections: Optional[object] = None,
+        disable_metrics_collection: Optional[object] = None,
        pre_copy_script: Optional[object] = None,
        **kwargs
    ):
-        super(OdbcSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+        super(OdbcSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
        self.type = 'OdbcSink'  # type: str
        self.pre_copy_script = pre_copy_script

@@ -26444,12 +27505,15 @@ class OdbcSource(TabularSource):
    :param max_concurrent_connections: The maximum concurrent connection count for the source data
     store. Type: integer (or Expression with resultType integer).
    :type max_concurrent_connections: object
+    :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+     false. Type: boolean (or Expression with resultType boolean).
+    :type disable_metrics_collection: object
    :param query_timeout: Query timeout. Type: string (or Expression with resultType string),
     pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type query_timeout: object
    :param additional_columns: Specifies the additional columns to be added to source data. Type:
-     array of objects (or Expression with resultType array of objects).
-    :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+     array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+    :type additional_columns: object
    :param query: Database query. Type: string (or Expression with resultType string).
:type query: object """ @@ -26464,8 +27528,9 @@ class OdbcSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -26476,12 +27541,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, query: Optional[object] = None, **kwargs ): - super(OdbcSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(OdbcSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'OdbcSource' # type: str self.query = query @@ -26505,14 +27571,14 @@ class OdbcTableDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The ODBC table name. Type: string (or Expression with resultType string). :type table_name: object """ @@ -26573,14 +27639,14 @@ class Office365Dataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. 
:type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: Required. Name of the dataset to extract from Office 365. Type: string (or Expression with resultType string). :type table_name: object @@ -26641,11 +27707,11 @@ class Office365LinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param office365_tenant_id: Required. Azure tenant ID to which the Office 365 account belongs. @@ -26658,7 +27724,7 @@ class Office365LinkedService(LinkedService): Expression with resultType string). :type service_principal_id: object :param service_principal_key: Required. Specify the application's key. - :type service_principal_key: ~data_factory_management_client.models.SecretBase + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -26730,6 +27796,9 @@ class Office365Source(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param allowed_groups: The groups containing all the users. Type: array of strings (or Expression with resultType array of strings). 
:type allowed_groups: object @@ -26761,6 +27830,7 @@ class Office365Source(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'allowed_groups': {'key': 'allowedGroups', 'type': 'object'}, 'user_scope_filter_uri': {'key': 'userScopeFilterUri', 'type': 'object'}, 'date_filter_column': {'key': 'dateFilterColumn', 'type': 'object'}, @@ -26776,6 +27846,7 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, allowed_groups: Optional[object] = None, user_scope_filter_uri: Optional[object] = None, date_filter_column: Optional[object] = None, @@ -26784,7 +27855,7 @@ def __init__( output_columns: Optional[object] = None, **kwargs ): - super(Office365Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(Office365Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'Office365Source' # type: str self.allowed_groups = allowed_groups self.user_scope_filter_uri = user_scope_filter_uri @@ -26802,10 +27873,9 @@ class Operation(msrest.serialization.Model): :param origin: The intended executor of the operation. :type origin: str :param display: Metadata associated with the operation. - :type display: ~data_factory_management_client.models.OperationDisplay + :type display: ~azure.mgmt.datafactory.models.OperationDisplay :param service_specification: Details about a service operation. - :type service_specification: - ~data_factory_management_client.models.OperationServiceSpecification + :type service_specification: ~azure.mgmt.datafactory.models.OperationServiceSpecification """ _attribute_map = { @@ -26871,7 +27941,7 @@ class OperationListResponse(msrest.serialization.Model): """A list of operations that can be performed by the Data Factory service. :param value: List of Data Factory operations supported by the Data Factory resource provider. - :type value: list[~data_factory_management_client.models.Operation] + :type value: list[~azure.mgmt.datafactory.models.Operation] :param next_link: The link to the next page of results, if any remaining results exist. :type next_link: str """ @@ -27001,9 +28071,9 @@ class OperationMetricSpecification(msrest.serialization.Model): :param source_mdm_namespace: The name of the MDM namespace. :type source_mdm_namespace: str :param availabilities: Defines how often data for metrics becomes available. - :type availabilities: list[~data_factory_management_client.models.OperationMetricAvailability] + :type availabilities: list[~azure.mgmt.datafactory.models.OperationMetricAvailability] :param dimensions: Defines the metric dimension. 
- :type dimensions: list[~data_factory_management_client.models.OperationMetricDimension] + :type dimensions: list[~azure.mgmt.datafactory.models.OperationMetricDimension] """ _attribute_map = { @@ -27051,11 +28121,9 @@ class OperationServiceSpecification(msrest.serialization.Model): """Details about a service operation. :param log_specifications: Details about operations related to logs. - :type log_specifications: - list[~data_factory_management_client.models.OperationLogSpecification] + :type log_specifications: list[~azure.mgmt.datafactory.models.OperationLogSpecification] :param metric_specifications: Details about operations related to metrics. - :type metric_specifications: - list[~data_factory_management_client.models.OperationMetricSpecification] + :type metric_specifications: list[~azure.mgmt.datafactory.models.OperationMetricSpecification] """ _attribute_map = { @@ -27086,11 +28154,11 @@ class OracleCloudStorageLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param access_key_id: The access key identifier of the Oracle Cloud Storage Identity and Access @@ -27098,7 +28166,7 @@ class OracleCloudStorageLinkedService(LinkedService): :type access_key_id: object :param secret_access_key: The secret access key of the Oracle Cloud Storage Identity and Access Management (IAM) user. - :type secret_access_key: ~data_factory_management_client.models.SecretBase + :type secret_access_key: ~azure.mgmt.datafactory.models.SecretBase :param service_url: This value specifies the endpoint to access with the Oracle Cloud Storage Connector. This is an optional property; change it only if you want to try a different service endpoint or want to switch between https and http. Type: string (or Expression with resultType @@ -27215,6 +28283,9 @@ class OracleCloudStorageReadSettings(StoreReadSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). 
:type recursive: object @@ -27255,6 +28326,7 @@ class OracleCloudStorageReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -27272,6 +28344,7 @@ def __init__( *, additional_properties: Optional[Dict[str, object]] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, recursive: Optional[object] = None, wildcard_folder_path: Optional[object] = None, wildcard_file_name: Optional[object] = None, @@ -27284,7 +28357,7 @@ def __init__( modified_datetime_end: Optional[object] = None, **kwargs ): - super(OracleCloudStorageReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(OracleCloudStorageReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'OracleCloudStorageReadSettings' # type: str self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path @@ -27309,18 +28382,18 @@ class OracleLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param connection_string: Required. The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. :type connection_string: object :param password: The Azure key vault secret reference of password in connection string. - :type password: ~data_factory_management_client.models.AzureKeyVaultSecretReference + :type password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -27416,11 +28489,11 @@ class OracleServiceCloudLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. 
- :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param host: Required. The URL of the Oracle Service Cloud instance. @@ -27429,7 +28502,7 @@ class OracleServiceCloudLinkedService(LinkedService): :type username: object :param password: Required. The password corresponding to the user name that you provided in the username key. - :type password: ~data_factory_management_client.models.SecretBase + :type password: ~azure.mgmt.datafactory.models.SecretBase :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. Type: boolean (or Expression with resultType boolean). :type use_encrypted_endpoints: object @@ -27517,14 +28590,14 @@ class OracleServiceCloudObjectDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The table name. Type: string (or Expression with resultType string). :type table_name: object """ @@ -27585,12 +28658,15 @@ class OracleServiceCloudSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
:type query: object @@ -27606,8 +28682,9 @@ class OracleServiceCloudSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -27618,12 +28695,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, query: Optional[object] = None, **kwargs ): - super(OracleServiceCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(OracleServiceCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'OracleServiceCloudSource' # type: str self.query = query @@ -27653,6 +28731,9 @@ class OracleSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType string). 
:type pre_copy_script: object @@ -27670,6 +28751,7 @@ class OracleSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } @@ -27682,10 +28764,11 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, pre_copy_script: Optional[object] = None, **kwargs ): - super(OracleSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(OracleSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'OracleSink' # type: str self.pre_copy_script = pre_copy_script @@ -27709,6 +28792,9 @@ class OracleSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param oracle_reader_query: Oracle reader query. Type: string (or Expression with resultType string). :type oracle_reader_query: object @@ -27719,10 +28805,10 @@ class OracleSource(CopySource): Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". :type partition_option: object :param partition_settings: The settings that will be leveraged for Oracle source partitioning. - :type partition_settings: ~data_factory_management_client.models.OraclePartitionSettings + :type partition_settings: ~azure.mgmt.datafactory.models.OraclePartitionSettings :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
+ :type additional_columns: object """ _validation = { @@ -27735,11 +28821,12 @@ class OracleSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'oracle_reader_query': {'key': 'oracleReaderQuery', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'partition_option': {'key': 'partitionOption', 'type': 'object'}, 'partition_settings': {'key': 'partitionSettings', 'type': 'OraclePartitionSettings'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -27749,14 +28836,15 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, oracle_reader_query: Optional[object] = None, query_timeout: Optional[object] = None, partition_option: Optional[object] = None, partition_settings: Optional["OraclePartitionSettings"] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, **kwargs ): - super(OracleSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(OracleSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'OracleSource' # type: str self.oracle_reader_query = oracle_reader_query self.query_timeout = query_timeout @@ -27784,14 +28872,14 @@ class OracleTableDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: This property will be retired. Please consider using schema + table properties instead. :type table_name: object @@ -27865,18 +28953,19 @@ class OrcDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. 
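# --- editorial aside (illustrative sketch, not part of the generated diff) ---
# Two changes repeat throughout the source/sink hunks above: an optional
# `disable_metrics_collection` parameter (serialized as `disableMetricsCollection`)
# is threaded through every CopySource/CopySink subclass and forwarded to the base
# class, and `additional_columns` is relaxed from list[AdditionalColumns] to a bare
# object so it can also carry an ADF expression. Assuming `OracleSource` is imported
# from this extension's vendored models module (exact import path not shown here),
# construction might look like:
source = OracleSource(
    oracle_reader_query="SELECT * FROM MY_TABLE",
    disable_metrics_collection=True,  # new optional flag; service default is false
    additional_columns=[{"name": "copied_at", "value": "@utcnow()"}],  # any JSON-like object is now accepted
)
# --- end aside ---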
- :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param location: The location of the ORC data storage. - :type location: ~data_factory_management_client.models.DatasetLocation - :param orc_compression_codec: Possible values include: "none", "zlib", "snappy", "lzo". - :type orc_compression_codec: str or ~data_factory_management_client.models.OrcCompressionCodec + :type location: ~azure.mgmt.datafactory.models.DatasetLocation + :param orc_compression_codec: The data orcCompressionCodec. Type: string (or Expression with + resultType string). + :type orc_compression_codec: object """ _validation = { @@ -27895,7 +28984,7 @@ class OrcDataset(Dataset): 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, - 'orc_compression_codec': {'key': 'typeProperties.orcCompressionCodec', 'type': 'str'}, + 'orc_compression_codec': {'key': 'typeProperties.orcCompressionCodec', 'type': 'object'}, } def __init__( @@ -27910,7 +28999,7 @@ def __init__( annotations: Optional[List[object]] = None, folder: Optional["DatasetFolder"] = None, location: Optional["DatasetLocation"] = None, - orc_compression_codec: Optional[Union[str, "OrcCompressionCodec"]] = None, + orc_compression_codec: Optional[object] = None, **kwargs ): super(OrcDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) @@ -27983,10 +29072,13 @@ class OrcSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param store_settings: ORC store settings. - :type store_settings: ~data_factory_management_client.models.StoreWriteSettings + :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings :param format_settings: ORC format settings. 
- :type format_settings: ~data_factory_management_client.models.OrcWriteSettings + :type format_settings: ~azure.mgmt.datafactory.models.OrcWriteSettings """ _validation = { @@ -28001,6 +29093,7 @@ class OrcSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, 'format_settings': {'key': 'formatSettings', 'type': 'OrcWriteSettings'}, } @@ -28014,11 +29107,12 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, store_settings: Optional["StoreWriteSettings"] = None, format_settings: Optional["OrcWriteSettings"] = None, **kwargs ): - super(OrcSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(OrcSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'OrcSink' # type: str self.store_settings = store_settings self.format_settings = format_settings @@ -28043,11 +29137,14 @@ class OrcSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param store_settings: ORC store settings. - :type store_settings: ~data_factory_management_client.models.StoreReadSettings + :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
+ :type additional_columns: object """ _validation = { @@ -28060,8 +29157,9 @@ class OrcSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -28071,11 +29169,12 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, store_settings: Optional["StoreReadSettings"] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, **kwargs ): - super(OrcSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(OrcSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'OrcSource' # type: str self.store_settings = store_settings self.additional_columns = additional_columns @@ -28133,7 +29232,7 @@ class PackageStore(msrest.serialization.Model): :param name: Required. The name of the package store. :type name: str :param package_store_linked_service: Required. The package store linked service reference. - :type package_store_linked_service: ~data_factory_management_client.models.EntityReference + :type package_store_linked_service: ~azure.mgmt.datafactory.models.EntityReference """ _validation = { @@ -28165,7 +29264,7 @@ class ParameterSpecification(msrest.serialization.Model): :param type: Required. Parameter type. Possible values include: "Object", "String", "Int", "Float", "Bool", "Array", "SecureString". - :type type: str or ~data_factory_management_client.models.ParameterType + :type type: str or ~azure.mgmt.datafactory.models.ParameterType :param default_value: Default value of parameter. :type default_value: object """ @@ -28210,19 +29309,19 @@ class ParquetDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. 
- :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param location: The location of the parquet storage. - :type location: ~data_factory_management_client.models.DatasetLocation - :param compression_codec: Possible values include: "none", "gzip", "snappy", "lzo", "bzip2", - "deflate", "zipDeflate", "lz4", "tar", "tarGZip". - :type compression_codec: str or ~data_factory_management_client.models.CompressionCodec + :type location: ~azure.mgmt.datafactory.models.DatasetLocation + :param compression_codec: The data compressionCodec. Type: string (or Expression with + resultType string). + :type compression_codec: object """ _validation = { @@ -28241,7 +29340,7 @@ class ParquetDataset(Dataset): 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, - 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'str'}, + 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'object'}, } def __init__( @@ -28256,7 +29355,7 @@ def __init__( annotations: Optional[List[object]] = None, folder: Optional["DatasetFolder"] = None, location: Optional["DatasetLocation"] = None, - compression_codec: Optional[Union[str, "CompressionCodec"]] = None, + compression_codec: Optional[object] = None, **kwargs ): super(ParquetDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) @@ -28329,10 +29428,13 @@ class ParquetSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param store_settings: Parquet store settings. - :type store_settings: ~data_factory_management_client.models.StoreWriteSettings + :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings :param format_settings: Parquet format settings. 
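# --- editorial aside (illustrative sketch, not part of the generated diff) ---
# The dataset hunks above loosen the codec properties: OrcDataset.orc_compression_codec
# and ParquetDataset.compression_codec change from enum-backed strings
# (Union[str, OrcCompressionCodec] / Union[str, CompressionCodec]) to plain objects,
# so a pipeline expression is now accepted as well as a literal string. A sketch,
# where `ls_ref` is a pre-built LinkedServiceReference (construction elided):
dataset = ParquetDataset(
    linked_service_name=ls_ref,
    compression_codec={"type": "Expression", "value": "@dataset().codec"},  # previously limited to enum values like "gzip"
)
# --- end aside ---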
- :type format_settings: ~data_factory_management_client.models.ParquetWriteSettings + :type format_settings: ~azure.mgmt.datafactory.models.ParquetWriteSettings """ _validation = { @@ -28347,6 +29449,7 @@ class ParquetSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, 'format_settings': {'key': 'formatSettings', 'type': 'ParquetWriteSettings'}, } @@ -28360,11 +29463,12 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, store_settings: Optional["StoreWriteSettings"] = None, format_settings: Optional["ParquetWriteSettings"] = None, **kwargs ): - super(ParquetSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(ParquetSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'ParquetSink' # type: str self.store_settings = store_settings self.format_settings = format_settings @@ -28389,11 +29493,14 @@ class ParquetSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param store_settings: Parquet store settings. - :type store_settings: ~data_factory_management_client.models.StoreReadSettings + :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
+ :type additional_columns: object """ _validation = { @@ -28406,8 +29513,9 @@ class ParquetSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -28417,11 +29525,12 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, store_settings: Optional["StoreReadSettings"] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, **kwargs ): - super(ParquetSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(ParquetSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'ParquetSource' # type: str self.store_settings = store_settings self.additional_columns = additional_columns @@ -28482,11 +29591,11 @@ class PaypalLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param host: Required. The URL of the PayPal instance. (i.e. api.sandbox.paypal.com). @@ -28494,7 +29603,7 @@ class PaypalLinkedService(LinkedService): :param client_id: Required. The client ID associated with your PayPal application. :type client_id: object :param client_secret: The client secret associated with your PayPal application. - :type client_secret: ~data_factory_management_client.models.SecretBase + :type client_secret: ~azure.mgmt.datafactory.models.SecretBase :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. :type use_encrypted_endpoints: object @@ -28580,14 +29689,14 @@ class PaypalObjectDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. 
- :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The table name. Type: string (or Expression with resultType string). :type table_name: object """ @@ -28648,12 +29757,15 @@ class PaypalSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
:type query: object @@ -28669,8 +29781,9 @@ class PaypalSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -28681,12 +29794,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, query: Optional[object] = None, **kwargs ): - super(PaypalSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(PaypalSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'PaypalSource' # type: str self.query = query @@ -28702,11 +29816,11 @@ class PhoenixLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param host: Required. The IP address or host name of the Phoenix server. (i.e. @@ -28722,12 +29836,11 @@ class PhoenixLinkedService(LinkedService): :param authentication_type: Required. The authentication mechanism used to connect to the Phoenix server. Possible values include: "Anonymous", "UsernameAndPassword", "WindowsAzureHDInsightService". - :type authentication_type: str or - ~data_factory_management_client.models.PhoenixAuthenticationType + :type authentication_type: str or ~azure.mgmt.datafactory.models.PhoenixAuthenticationType :param username: The user name used to connect to the Phoenix server. :type username: object :param password: The password corresponding to the user name. - :type password: ~data_factory_management_client.models.SecretBase + :type password: ~azure.mgmt.datafactory.models.SecretBase :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The default value is false. 
:type enable_ssl: object @@ -28834,14 +29947,14 @@ class PhoenixObjectDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: This property will be retired. Please consider using schema + table properties instead. :type table_name: object @@ -28915,12 +30028,15 @@ class PhoenixSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
:type query: object @@ -28936,8 +30052,9 @@ class PhoenixSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -28948,12 +30065,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, query: Optional[object] = None, **kwargs ): - super(PhoenixSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(PhoenixSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'PhoenixSource' # type: str self.query = query @@ -29006,7 +30124,7 @@ class PipelineListResponse(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. :param value: Required. List of pipelines. - :type value: list[~data_factory_management_client.models.PipelineResource] + :type value: list[~azure.mgmt.datafactory.models.PipelineResource] :param next_link: The link to the next page of results, if any remaining results exist. :type next_link: str """ @@ -29036,8 +30154,7 @@ class PipelinePolicy(msrest.serialization.Model): """Pipeline Policy. :param elapsed_time_metric: Pipeline ElapsedTime Metric Policy. - :type elapsed_time_metric: - ~data_factory_management_client.models.PipelineElapsedTimeMetricPolicy + :type elapsed_time_metric: ~azure.mgmt.datafactory.models.PipelineElapsedTimeMetricPolicy """ _attribute_map = { @@ -29113,11 +30230,11 @@ class PipelineResource(SubResource): :param description: The description of the pipeline. :type description: str :param activities: List of activities in pipeline. - :type activities: list[~data_factory_management_client.models.Activity] + :type activities: list[~azure.mgmt.datafactory.models.Activity] :param parameters: List of parameters for pipeline. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param variables: List of variables for pipeline. - :type variables: dict[str, ~data_factory_management_client.models.VariableSpecification] + :type variables: dict[str, ~azure.mgmt.datafactory.models.VariableSpecification] :param concurrency: The max number of concurrent runs for the pipeline. 
:type concurrency: int :param annotations: List of tags that can be used for describing the Pipeline. @@ -29126,9 +30243,9 @@ class PipelineResource(SubResource): :type run_dimensions: dict[str, object] :param folder: The folder that this Pipeline is in. If not specified, Pipeline will appear at the root level. - :type folder: ~data_factory_management_client.models.PipelineFolder + :type folder: ~azure.mgmt.datafactory.models.PipelineFolder :param policy: Pipeline Policy. - :type policy: ~data_factory_management_client.models.PipelinePolicy + :type policy: ~azure.mgmt.datafactory.models.PipelinePolicy """ _validation = { @@ -29206,7 +30323,7 @@ class PipelineRun(msrest.serialization.Model): :ivar run_dimensions: Run dimensions emitted by Pipeline run. :vartype run_dimensions: dict[str, str] :ivar invoked_by: Entity that started the pipeline run. - :vartype invoked_by: ~data_factory_management_client.models.PipelineRunInvokedBy + :vartype invoked_by: ~azure.mgmt.datafactory.models.PipelineRunInvokedBy :ivar last_updated: The last updated timestamp for the pipeline run event in ISO8601 format. :vartype last_updated: ~datetime.datetime :ivar run_start: The start time of a pipeline run in ISO8601 format. @@ -29288,18 +30405,26 @@ class PipelineRunInvokedBy(msrest.serialization.Model): :vartype id: str :ivar invoked_by_type: The type of the entity that started the run. :vartype invoked_by_type: str + :ivar pipeline_name: The name of the pipeline that triggered the run, if any. + :vartype pipeline_name: str + :ivar pipeline_run_id: The run id of the pipeline that triggered the run, if any. + :vartype pipeline_run_id: str """ _validation = { 'name': {'readonly': True}, 'id': {'readonly': True}, 'invoked_by_type': {'readonly': True}, + 'pipeline_name': {'readonly': True}, + 'pipeline_run_id': {'readonly': True}, } _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'id': {'key': 'id', 'type': 'str'}, 'invoked_by_type': {'key': 'invokedByType', 'type': 'str'}, + 'pipeline_name': {'key': 'pipelineName', 'type': 'str'}, + 'pipeline_run_id': {'key': 'pipelineRunId', 'type': 'str'}, } def __init__( @@ -29310,6 +30435,8 @@ def __init__( self.name = None self.id = None self.invoked_by_type = None + self.pipeline_name = None + self.pipeline_run_id = None class PipelineRunsQueryResponse(msrest.serialization.Model): @@ -29318,7 +30445,7 @@ class PipelineRunsQueryResponse(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. :param value: Required. List of pipeline runs. - :type value: list[~data_factory_management_client.models.PipelineRun] + :type value: list[~azure.mgmt.datafactory.models.PipelineRun] :param continuation_token: The continuation token for getting the next page of results, if any remaining results exist, null otherwise. :type continuation_token: str @@ -29352,7 +30479,7 @@ class PolybaseSettings(msrest.serialization.Model): collection. :type additional_properties: dict[str, object] :param reject_type: Reject type. Possible values include: "value", "percentage". - :type reject_type: str or ~data_factory_management_client.models.PolybaseSettingsRejectType + :type reject_type: str or ~azure.mgmt.datafactory.models.PolybaseSettingsRejectType :param reject_value: Specifies the value or the percentage of rows that can be rejected before the query fails. Type: number (or Expression with resultType number), minimum: 0. :type reject_value: object @@ -29403,17 +30530,17 @@ class PostgreSqlLinkedService(LinkedService): :param type: Required. 
Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param connection_string: Required. The connection string. :type connection_string: object :param password: The Azure key vault secret reference of password in connection string. - :type password: ~data_factory_management_client.models.AzureKeyVaultSecretReference + :type password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -29476,12 +30603,15 @@ class PostgreSqlSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: Database query. Type: string (or Expression with resultType string). 
:type query: object """ @@ -29496,8 +30626,9 @@ class PostgreSqlSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -29508,12 +30639,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, query: Optional[object] = None, **kwargs ): - super(PostgreSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(PostgreSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'PostgreSqlSource' # type: str self.query = query @@ -29537,14 +30669,14 @@ class PostgreSqlTableDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: This property will be retired. Please consider using schema + table properties instead. :type table_name: object @@ -29609,11 +30741,11 @@ class PrestoLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. 
- :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param host: Required. The IP address or host name of the Presto server. (i.e. @@ -29628,12 +30760,11 @@ class PrestoLinkedService(LinkedService): :type port: object :param authentication_type: Required. The authentication mechanism used to connect to the Presto server. Possible values include: "Anonymous", "LDAP". - :type authentication_type: str or - ~data_factory_management_client.models.PrestoAuthenticationType + :type authentication_type: str or ~azure.mgmt.datafactory.models.PrestoAuthenticationType :param username: The user name used to connect to the Presto server. :type username: object :param password: The password corresponding to the user name. - :type password: ~data_factory_management_client.models.SecretBase + :type password: ~azure.mgmt.datafactory.models.SecretBase :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The default value is false. :type enable_ssl: object @@ -29751,14 +30882,14 @@ class PrestoObjectDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: This property will be retired. Please consider using schema + table properties instead. :type table_name: object @@ -29832,12 +30963,15 @@ class PrestoSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
     :type query: object
@@ -29853,8 +30987,9 @@ class PrestoSource(TabularSource):
         'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
         'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
         'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
-        'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+        'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
         'query': {'key': 'query', 'type': 'object'},
     }
@@ -29865,12 +31000,13 @@ def __init__(
         self,
         *,
         additional_properties: Optional[Dict[str, object]] = None,
         source_retry_count: Optional[object] = None,
         source_retry_wait: Optional[object] = None,
         max_concurrent_connections: Optional[object] = None,
+        disable_metrics_collection: Optional[object] = None,
         query_timeout: Optional[object] = None,
-        additional_columns: Optional[List["AdditionalColumns"]] = None,
+        additional_columns: Optional[object] = None,
         query: Optional[object] = None,
         **kwargs
     ):
-        super(PrestoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+        super(PrestoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
         self.type = 'PrestoSource'  # type: str
         self.query = query
@@ -29881,7 +31017,7 @@ class PrivateEndpointConnectionListResponse(msrest.serialization.Model):
     All required parameters must be populated in order to send to Azure.
     :param value: Required. List of Private Endpoint Connections.
-    :type value: list[~data_factory_management_client.models.PrivateEndpointConnectionResource]
+    :type value: list[~azure.mgmt.datafactory.models.PrivateEndpointConnectionResource]
     :param next_link: The link to the next page of results, if any remaining results exist.
     :type next_link: str
     """
@@ -29921,7 +31057,7 @@ class PrivateEndpointConnectionResource(SubResource):
     :ivar etag: Etag identifies change in the resource.
     :vartype etag: str
     :param properties: Core resource properties.
-    :type properties: ~data_factory_management_client.models.RemotePrivateEndpointConnection
+    :type properties: ~azure.mgmt.datafactory.models.RemotePrivateEndpointConnection
     """
     _validation = {
@@ -29954,7 +31090,7 @@ class PrivateLinkConnectionApprovalRequest(msrest.serialization.Model):
     :param private_link_service_connection_state: The state of a private link connection.
     :type private_link_service_connection_state:
-     ~data_factory_management_client.models.PrivateLinkConnectionState
+     ~azure.mgmt.datafactory.models.PrivateLinkConnectionState
     """
     _attribute_map = {
@@ -29985,7 +31121,7 @@ class PrivateLinkConnectionApprovalRequestResource(SubResource):
     :ivar etag: Etag identifies change in the resource.
     :vartype etag: str
     :param properties: Core resource properties.
-    :type properties: ~data_factory_management_client.models.PrivateLinkConnectionApprovalRequest
+    :type properties: ~azure.mgmt.datafactory.models.PrivateLinkConnectionApprovalRequest
     """
     _validation = {
@@ -30058,7 +31194,7 @@ class PrivateLinkResource(SubResource):
     :ivar etag: Etag identifies change in the resource.
     :vartype etag: str
     :param properties: Core resource properties.
-    :type properties: ~data_factory_management_client.models.PrivateLinkResourceProperties
+    :type properties: ~azure.mgmt.datafactory.models.PrivateLinkResourceProperties
     """
     _validation = {
@@ -30127,7 +31263,7 @@ class PrivateLinkResourcesWrapper(msrest.serialization.Model):
     All required parameters must be populated in order to send to Azure.
     :param value: Required.
-    :type value: list[~data_factory_management_client.models.PrivateLinkResource]
+    :type value: list[~azure.mgmt.datafactory.models.PrivateLinkResource]
     """
     _validation = {
@@ -30152,7 +31288,7 @@ class QueryDataFlowDebugSessionsResponse(msrest.serialization.Model):
     """A list of active debug sessions.
     :param value: Array with all active debug sessions.
-    :type value: list[~data_factory_management_client.models.DataFlowDebugSessionInfo]
+    :type value: list[~azure.mgmt.datafactory.models.DataFlowDebugSessionInfo]
     :param next_link: The link to the next page of results, if any remaining results exist.
     :type next_link: str
     """
@@ -30185,11 +31321,11 @@ class QuickBooksLinkedService(LinkedService):
     :param type: Required. Type of linked service.Constant filled by server.
     :type type: str
     :param connect_via: The integration runtime reference.
-    :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference
+    :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
     :param description: Linked service description.
     :type description: str
     :param parameters: Parameters for linked service.
-    :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification]
+    :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification]
     :param annotations: List of tags that can be used for describing the linked service.
     :type annotations: list[object]
     :param connection_properties: Properties used to connect to QuickBooks. It is mutually
@@ -30202,11 +31338,11 @@ class QuickBooksLinkedService(LinkedService):
     :param consumer_key: The consumer key for OAuth 1.0 authentication.
     :type consumer_key: object
     :param consumer_secret: The consumer secret for OAuth 1.0 authentication.
-    :type consumer_secret: ~data_factory_management_client.models.SecretBase
+    :type consumer_secret: ~azure.mgmt.datafactory.models.SecretBase
     :param access_token: The access token for OAuth 1.0 authentication.
-    :type access_token: ~data_factory_management_client.models.SecretBase
+    :type access_token: ~azure.mgmt.datafactory.models.SecretBase
     :param access_token_secret: The access token secret for OAuth 1.0 authentication.
-    :type access_token_secret: ~data_factory_management_client.models.SecretBase
+    :type access_token_secret: ~azure.mgmt.datafactory.models.SecretBase
     :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted
      using HTTPS. The default value is true.
     :type use_encrypted_endpoints: object
@@ -30289,14 +31425,14 @@ class QuickBooksObjectDataset(Dataset):
      Expression with resultType array), itemType: DatasetSchemaDataElement.
     :type schema: object
     :param linked_service_name: Required. Linked service reference.
-    :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference
+    :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference
     :param parameters: Parameters for dataset.
-    :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification]
+    :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification]
     :param annotations: List of tags that can be used for describing the Dataset.
     :type annotations: list[object]
     :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at
      the root level.
-    :type folder: ~data_factory_management_client.models.DatasetFolder
+    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
     :param table_name: The table name. Type: string (or Expression with resultType string).
     :type table_name: object
     """
@@ -30357,12 +31493,15 @@ class QuickBooksSource(TabularSource):
     :param max_concurrent_connections: The maximum concurrent connection count for the source data
      store. Type: integer (or Expression with resultType integer).
     :type max_concurrent_connections: object
+    :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+     false. Type: boolean (or Expression with resultType boolean).
+    :type disable_metrics_collection: object
     :param query_timeout: Query timeout. Type: string (or Expression with resultType string),
      pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
     :type query_timeout: object
     :param additional_columns: Specifies the additional columns to be added to source data. Type:
-     array of objects (or Expression with resultType array of objects).
-    :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+     array of objects(AdditionalColumns) (or Expression with resultType array of objects).
+    :type additional_columns: object
     :param query: A query to retrieve data from source. Type: string (or Expression with
      resultType string).
     :type query: object
@@ -30378,8 +31517,9 @@ class QuickBooksSource(TabularSource):
         'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
         'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
         'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
-        'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+        'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
         'query': {'key': 'query', 'type': 'object'},
     }
@@ -30390,12 +31530,13 @@ def __init__(
         self,
         *,
         additional_properties: Optional[Dict[str, object]] = None,
         source_retry_count: Optional[object] = None,
         source_retry_wait: Optional[object] = None,
         max_concurrent_connections: Optional[object] = None,
+        disable_metrics_collection: Optional[object] = None,
         query_timeout: Optional[object] = None,
-        additional_columns: Optional[List["AdditionalColumns"]] = None,
+        additional_columns: Optional[object] = None,
         query: Optional[object] = None,
         **kwargs
     ):
-        super(QuickBooksSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+        super(QuickBooksSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
         self.type = 'QuickBooksSource'  # type: str
         self.query = query
@@ -30411,12 +31552,11 @@ class RecurrenceSchedule(msrest.serialization.Model):
     :param hours: The hours.
     :type hours: list[int]
     :param week_days: The days of the week.
-    :type week_days: list[str or ~data_factory_management_client.models.DaysOfWeek]
+    :type week_days: list[str or ~azure.mgmt.datafactory.models.DaysOfWeek]
     :param month_days: The month days.
     :type month_days: list[int]
     :param monthly_occurrences: The monthly occurrences.
-    :type monthly_occurrences:
-     list[~data_factory_management_client.models.RecurrenceScheduleOccurrence]
+    :type monthly_occurrences: list[~azure.mgmt.datafactory.models.RecurrenceScheduleOccurrence]
     """
     _attribute_map = {
@@ -30456,7 +31596,7 @@ class RecurrenceScheduleOccurrence(msrest.serialization.Model):
     :type additional_properties: dict[str, object]
     :param day: The day of the week. Possible values include: "Sunday", "Monday", "Tuesday",
      "Wednesday", "Thursday", "Friday", "Saturday".
-    :type day: str or ~data_factory_management_client.models.DayOfWeek
+    :type day: str or ~azure.mgmt.datafactory.models.DayOfWeek
     :param occurrence: The occurrence.
     :type occurrence: int
     """
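The recurrence hunks above are docstring-only: they retarget the DaysOfWeek/DayOfWeek references to the azure.mgmt.datafactory namespace, and the shape of a schedule is unchanged. For orientation, a small sketch of how these models compose, under the same import-path assumption as before and with made-up values:

    from azure.mgmt.datafactory.models import RecurrenceSchedule, RecurrenceScheduleOccurrence

    # Fire at 06:00 on the first Friday of each month; day accepts the plain
    # string "Friday" or the DayOfWeek enum.
    schedule = RecurrenceSchedule(
        minutes=[0],
        hours=[6],
        monthly_occurrences=[RecurrenceScheduleOccurrence(day="Friday", occurrence=1)],
    )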
@@ -30530,7 +31670,7 @@ class RedshiftUnloadSettings(msrest.serialization.Model):
     :param s3_linked_service_name: Required. The name of the Amazon S3 linked service which will
      be used for the unload operation when copying from the Amazon Redshift source.
-    :type s3_linked_service_name: ~data_factory_management_client.models.LinkedServiceReference
+    :type s3_linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference
     :param bucket_name: Required. The bucket of the interim Amazon S3 which will be used to store
      the unloaded data from Amazon Redshift source. The bucket must be in the same region as the
      Amazon Redshift source. Type: string (or Expression with resultType string).
@@ -30578,11 +31718,14 @@ class RelationalSource(CopySource):
     :param max_concurrent_connections: The maximum concurrent connection count for the source data
      store. Type: integer (or Expression with resultType integer).
     :type max_concurrent_connections: object
+    :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+     false. Type: boolean (or Expression with resultType boolean).
+    :type disable_metrics_collection: object
     :param query: Database query. Type: string (or Expression with resultType string).
     :type query: object
     :param additional_columns: Specifies the additional columns to be added to source data. Type:
-     array of objects (or Expression with resultType array of objects).
-    :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+     array of objects(AdditionalColumns) (or Expression with resultType array of objects).
+    :type additional_columns: object
     """
     _validation = {
@@ -30595,8 +31738,9 @@ class RelationalSource(CopySource):
         'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
         'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
         'query': {'key': 'query', 'type': 'object'},
-        'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+        'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
     }

     def __init__(
@@ -30606,11 +31750,12 @@ def __init__(
         self,
         *,
         additional_properties: Optional[Dict[str, object]] = None,
         source_retry_count: Optional[object] = None,
         source_retry_wait: Optional[object] = None,
         max_concurrent_connections: Optional[object] = None,
+        disable_metrics_collection: Optional[object] = None,
         query: Optional[object] = None,
-        additional_columns: Optional[List["AdditionalColumns"]] = None,
+        additional_columns: Optional[object] = None,
         **kwargs
     ):
-        super(RelationalSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+        super(RelationalSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
         self.type = 'RelationalSource'  # type: str
         self.query = query
         self.additional_columns = additional_columns
@@ -30635,14 +31780,14 @@ class RelationalTableDataset(Dataset):
      Expression with resultType array), itemType: DatasetSchemaDataElement.
     :type schema: object
     :param linked_service_name: Required. Linked service reference.
-    :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference
+    :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference
     :param parameters: Parameters for dataset.
-    :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification]
+    :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification]
     :param annotations: List of tags that can be used for describing the Dataset.
     :type annotations: list[object]
     :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at
      the root level.
-    :type folder: ~data_factory_management_client.models.DatasetFolder
+    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
     :param table_name: The relational table name. Type: string (or Expression with resultType
      string).
     :type table_name: object
@@ -30693,10 +31838,10 @@ class RemotePrivateEndpointConnection(msrest.serialization.Model):
     :ivar provisioning_state:
     :vartype provisioning_state: str
     :param private_endpoint: PrivateEndpoint of a remote private endpoint connection.
-    :type private_endpoint: ~data_factory_management_client.models.ArmIdWrapper
+    :type private_endpoint: ~azure.mgmt.datafactory.models.ArmIdWrapper
     :param private_link_service_connection_state: The state of a private link connection.
     :type private_link_service_connection_state:
-     ~data_factory_management_client.models.PrivateLinkConnectionState
+     ~azure.mgmt.datafactory.models.PrivateLinkConnectionState
     """
     _validation = {
@@ -30738,7 +31883,7 @@ class RerunTumblingWindowTrigger(Trigger):
     :type description: str
     :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are
      called on the Trigger. Possible values include: "Started", "Stopped", "Disabled".
-    :vartype runtime_state: str or ~data_factory_management_client.models.TriggerRuntimeState
+    :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState
     :param annotations: List of tags that can be used for describing the trigger.
     :type annotations: list[object]
     :param parent_trigger: Required. The parent trigger reference.
@@ -30806,11 +31951,11 @@ class ResponsysLinkedService(LinkedService):
     :param type: Required. Type of linked service.Constant filled by server.
     :type type: str
     :param connect_via: The integration runtime reference.
-    :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference
+    :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
     :param description: Linked service description.
     :type description: str
     :param parameters: Parameters for linked service.
-    :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification]
+    :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification]
     :param annotations: List of tags that can be used for describing the linked service.
     :type annotations: list[object]
     :param endpoint: Required. The endpoint of the Responsys server.
@@ -30820,7 +31965,7 @@ class ResponsysLinkedService(LinkedService):
     :type client_id: object
     :param client_secret: The client secret associated with the Responsys application. Type:
      string (or Expression with resultType string).
-    :type client_secret: ~data_factory_management_client.models.SecretBase
+    :type client_secret: ~azure.mgmt.datafactory.models.SecretBase
     :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted
      using HTTPS. The default value is true. Type: boolean (or Expression with resultType boolean).
     :type use_encrypted_endpoints: object
@@ -30907,14 +32052,14 @@ class ResponsysObjectDataset(Dataset):
      Expression with resultType array), itemType: DatasetSchemaDataElement.
     :type schema: object
     :param linked_service_name: Required. Linked service reference.
-    :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference
+    :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference
     :param parameters: Parameters for dataset.
-    :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification]
+    :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification]
     :param annotations: List of tags that can be used for describing the Dataset.
     :type annotations: list[object]
     :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at
      the root level.
-    :type folder: ~data_factory_management_client.models.DatasetFolder
+    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
     :param table_name: The table name. Type: string (or Expression with resultType string).
     :type table_name: object
     """
@@ -30975,12 +32120,15 @@ class ResponsysSource(TabularSource):
     :param max_concurrent_connections: The maximum concurrent connection count for the source data
      store. Type: integer (or Expression with resultType integer).
     :type max_concurrent_connections: object
+    :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+     false. Type: boolean (or Expression with resultType boolean).
+    :type disable_metrics_collection: object
     :param query_timeout: Query timeout. Type: string (or Expression with resultType string),
      pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
     :type query_timeout: object
     :param additional_columns: Specifies the additional columns to be added to source data. Type:
-     array of objects (or Expression with resultType array of objects).
-    :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+     array of objects(AdditionalColumns) (or Expression with resultType array of objects).
+    :type additional_columns: object
     :param query: A query to retrieve data from source. Type: string (or Expression with
      resultType string).
     :type query: object
@@ -30996,8 +32144,9 @@ class ResponsysSource(TabularSource):
         'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
         'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
         'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
-        'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+        'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
         'query': {'key': 'query', 'type': 'object'},
     }
@@ -31008,12 +32157,13 @@ def __init__(
         self,
         *,
         additional_properties: Optional[Dict[str, object]] = None,
         source_retry_count: Optional[object] = None,
         source_retry_wait: Optional[object] = None,
         max_concurrent_connections: Optional[object] = None,
+        disable_metrics_collection: Optional[object] = None,
         query_timeout: Optional[object] = None,
-        additional_columns: Optional[List["AdditionalColumns"]] = None,
+        additional_columns: Optional[object] = None,
         query: Optional[object] = None,
         **kwargs
     ):
-        super(ResponsysSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+        super(ResponsysSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
         self.type = 'ResponsysSource'  # type: str
         self.query = query
@@ -31037,14 +32187,14 @@ class RestResourceDataset(Dataset):
      Expression with resultType array), itemType: DatasetSchemaDataElement.
     :type schema: object
     :param linked_service_name: Required. Linked service reference.
-    :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference
+    :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference
     :param parameters: Parameters for dataset.
-    :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification]
+    :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification]
     :param annotations: List of tags that can be used for describing the Dataset.
     :type annotations: list[object]
     :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at
      the root level.
-    :type folder: ~data_factory_management_client.models.DatasetFolder
+    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
     :param relative_url: The relative URL to the resource that the RESTful API provides. Type:
      string (or Expression with resultType string).
     :type relative_url: object
@@ -31122,11 +32272,11 @@ class RestServiceLinkedService(LinkedService):
     :param type: Required. Type of linked service.Constant filled by server.
     :type type: str
     :param connect_via: The integration runtime reference.
-    :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference
+    :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
     :param description: Linked service description.
     :type description: str
     :param parameters: Parameters for linked service.
-    :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification]
+    :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification]
     :param annotations: List of tags that can be used for describing the linked service.
     :type annotations: list[object]
     :param url: Required. The base URL of the REST service.
@@ -31138,12 +32288,11 @@ class RestServiceLinkedService(LinkedService):
     :param authentication_type: Required. Type of authentication used to connect to the REST
      service. Possible values include: "Anonymous", "Basic", "AadServicePrincipal",
      "ManagedServiceIdentity".
-    :type authentication_type: str or
-     ~data_factory_management_client.models.RestServiceAuthenticationType
+    :type authentication_type: str or ~azure.mgmt.datafactory.models.RestServiceAuthenticationType
     :param user_name: The user name used in Basic authentication type.
     :type user_name: object
     :param password: The password used in Basic authentication type.
-    :type password: ~data_factory_management_client.models.SecretBase
+    :type password: ~azure.mgmt.datafactory.models.SecretBase
     :param auth_headers: The additional HTTP headers in the request to RESTful API used for
      authorization. Type: object (or Expression with resultType object).
     :type auth_headers: object
     :param service_principal_id: The application's client ID used in AadServicePrincipal
      authentication type.
     :type service_principal_id: object
     :param service_principal_key: The application's key used in AadServicePrincipal authentication
      type.
-    :type service_principal_key: ~data_factory_management_client.models.SecretBase
+    :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase
     :param tenant: The tenant information (domain name or tenant ID) used in AadServicePrincipal
      authentication type under which your application resides.
     :type tenant: object
@@ -31166,6 +32315,8 @@ class RestServiceLinkedService(LinkedService):
      encrypted using the integration runtime credential manager. Type: string (or Expression with
      resultType string).
     :type encrypted_credential: object
+    :param credential: The credential reference containing authentication information.
+    :type credential: ~azure.mgmt.datafactory.models.CredentialReference
     """
     _validation = {
@@ -31193,6 +32344,7 @@ class RestServiceLinkedService(LinkedService):
         'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'},
         'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'},
         'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+        'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'},
     }

     def __init__(
@@ -31215,6 +32367,7 @@ def __init__(
         azure_cloud_type: Optional[object] = None,
         aad_resource_id: Optional[object] = None,
         encrypted_credential: Optional[object] = None,
+        credential: Optional["CredentialReference"] = None,
         **kwargs
     ):
         super(RestServiceLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
@@ -31231,6 +32384,7 @@ def __init__(
         self.azure_cloud_type = azure_cloud_type
         self.aad_resource_id = aad_resource_id
         self.encrypted_credential = encrypted_credential
+        self.credential = credential
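Beyond the namespace renames, this hunk adds a real property: REST linked services can now carry a credential reference, serialized under typeProperties.credential. A hedged sketch of wiring it up, assuming the same azure.mgmt.datafactory import path and that CredentialReference takes the credential's name as reference_name, as the other reference types in this SDK do; the URL and credential name are placeholders:

    from azure.mgmt.datafactory.models import CredentialReference, RestServiceLinkedService

    linked_service = RestServiceLinkedService(
        url="https://example.com/api",            # placeholder base URL
        authentication_type="ManagedServiceIdentity",
        aad_resource_id="https://example.com",    # placeholder AAD resource
        credential=CredentialReference(reference_name="myIdentityCredential"),
    )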
 class RestSink(CopySink):
@@ -31258,6 +32412,9 @@ class RestSink(CopySink):
     :param max_concurrent_connections: The maximum concurrent connection count for the sink data
      store. Type: integer (or Expression with resultType integer).
     :type max_concurrent_connections: object
+    :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+     false. Type: boolean (or Expression with resultType boolean).
+    :type disable_metrics_collection: object
     :param request_method: The HTTP method used to call the RESTful API. The default is POST.
      Type: string (or Expression with resultType string).
     :type request_method: object
@@ -31288,6 +32445,7 @@ class RestSink(CopySink):
         'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
         'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
         'request_method': {'key': 'requestMethod', 'type': 'object'},
         'additional_headers': {'key': 'additionalHeaders', 'type': 'object'},
         'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'},
@@ -31304,6 +32462,7 @@ def __init__(
         sink_retry_count: Optional[object] = None,
         sink_retry_wait: Optional[object] = None,
         max_concurrent_connections: Optional[object] = None,
+        disable_metrics_collection: Optional[object] = None,
         request_method: Optional[object] = None,
         additional_headers: Optional[object] = None,
         http_request_timeout: Optional[object] = None,
@@ -31311,7 +32470,7 @@ def __init__(
         http_compression_type: Optional[object] = None,
         **kwargs
     ):
-        super(RestSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+        super(RestSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
         self.type = 'RestSink'  # type: str
         self.request_method = request_method
         self.additional_headers = additional_headers
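The same disable_metrics_collection flag is threaded through both REST copy models (RestSink above, RestSource below); because it sits in the attribute map, msrest serializes it under the wire name disableMetricsCollection. A quick sketch under the usual import-path assumption, with illustrative values:

    from azure.mgmt.datafactory.models import RestSink

    sink = RestSink(
        request_method="POST",
        http_request_timeout="00:01:40",
        disable_metrics_collection=True,
    )
    # msrest emits the camelCase wire name from the attribute map.
    assert sink.serialize()["disableMetricsCollection"] is True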
@@ -31339,6 +32498,9 @@ class RestSource(CopySource):
     :param max_concurrent_connections: The maximum concurrent connection count for the source data
      store. Type: integer (or Expression with resultType integer).
     :type max_concurrent_connections: object
+    :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+     false. Type: boolean (or Expression with resultType boolean).
+    :type disable_metrics_collection: object
     :param request_method: The HTTP method used to call the RESTful API. The default is GET. Type:
      string (or Expression with resultType string).
     :type request_method: object
@@ -31359,8 +32521,8 @@ class RestSource(CopySource):
     :param request_interval: The time to await before sending next page request.
     :type request_interval: object
     :param additional_columns: Specifies the additional columns to be added to source data. Type:
-     array of objects (or Expression with resultType array of objects).
-    :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+     array of objects(AdditionalColumns) (or Expression with resultType array of objects).
+    :type additional_columns: object
     """
     _validation = {
@@ -31373,13 +32535,14 @@ class RestSource(CopySource):
         'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
         'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
         'request_method': {'key': 'requestMethod', 'type': 'object'},
         'request_body': {'key': 'requestBody', 'type': 'object'},
         'additional_headers': {'key': 'additionalHeaders', 'type': 'object'},
         'pagination_rules': {'key': 'paginationRules', 'type': 'object'},
         'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'},
         'request_interval': {'key': 'requestInterval', 'type': 'object'},
-        'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+        'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
     }

     def __init__(
@@ -31389,16 +32552,17 @@ def __init__(
         self,
         *,
         additional_properties: Optional[Dict[str, object]] = None,
         source_retry_count: Optional[object] = None,
         source_retry_wait: Optional[object] = None,
         max_concurrent_connections: Optional[object] = None,
+        disable_metrics_collection: Optional[object] = None,
         request_method: Optional[object] = None,
         request_body: Optional[object] = None,
         additional_headers: Optional[object] = None,
         pagination_rules: Optional[object] = None,
         http_request_timeout: Optional[object] = None,
         request_interval: Optional[object] = None,
-        additional_columns: Optional[List["AdditionalColumns"]] = None,
+        additional_columns: Optional[object] = None,
         **kwargs
     ):
-        super(RestSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+        super(RestSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
         self.type = 'RestSource'  # type: str
         self.request_method = request_method
         self.request_body = request_body
@@ -31455,9 +32619,9 @@ class RunFilterParameters(msrest.serialization.Model):
      'ISO 8601' format.
     :type last_updated_before: ~datetime.datetime
     :param filters: List of filters.
-    :type filters: list[~data_factory_management_client.models.RunQueryFilter]
+    :type filters: list[~azure.mgmt.datafactory.models.RunQueryFilter]
     :param order_by: List of OrderBy option.
-    :type order_by: list[~data_factory_management_client.models.RunQueryOrderBy]
+    :type order_by: list[~azure.mgmt.datafactory.models.RunQueryOrderBy]
     """
     _validation = {
@@ -31502,10 +32666,10 @@ class RunQueryFilter(msrest.serialization.Model):
      runs are TriggerName, TriggerRunTimestamp and Status. Possible values include: "PipelineName",
      "Status", "RunStart", "RunEnd", "ActivityName", "ActivityRunStart", "ActivityRunEnd",
      "ActivityType", "TriggerName", "TriggerRunTimestamp", "RunGroupId", "LatestOnly".
-    :type operand: str or ~data_factory_management_client.models.RunQueryFilterOperand
+    :type operand: str or ~azure.mgmt.datafactory.models.RunQueryFilterOperand
     :param operator: Required. Operator to be used for filter. Possible values include: "Equals",
      "NotEquals", "In", "NotIn".
-    :type operator: str or ~data_factory_management_client.models.RunQueryFilterOperator
+    :type operator: str or ~azure.mgmt.datafactory.models.RunQueryFilterOperator
     :param values: Required. List of filter values.
     :type values: list[str]
     """
@@ -31547,9 +32711,9 @@ class RunQueryOrderBy(msrest.serialization.Model):
      TriggerRunTimestamp and Status. Possible values include: "RunStart", "RunEnd", "PipelineName",
      "Status", "ActivityName", "ActivityRunStart", "ActivityRunEnd", "TriggerName",
      "TriggerRunTimestamp".
-    :type order_by: str or ~data_factory_management_client.models.RunQueryOrderByField
+    :type order_by: str or ~azure.mgmt.datafactory.models.RunQueryOrderByField
     :param order: Required. Sorting order of the parameter. Possible values include: "ASC",
      "DESC".
-    :type order: str or ~data_factory_management_client.models.RunQueryOrder
+    :type order: str or ~azure.mgmt.datafactory.models.RunQueryOrder
     """
     _validation = {
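RunQueryFilter and RunQueryOrderBy only change their enum namespaces here, but they are the building blocks for the RunFilterParameters model a few hunks up. A short sketch of querying failed runs with them, under the same import-path assumption; the time window and filter values are arbitrary:

    import datetime

    from azure.mgmt.datafactory.models import (
        RunFilterParameters,
        RunQueryFilter,
        RunQueryOrderBy,
    )

    # Failed runs in a one-day window, newest first.
    filter_parameters = RunFilterParameters(
        last_updated_after=datetime.datetime(2021, 6, 1),
        last_updated_before=datetime.datetime(2021, 6, 2),
        filters=[RunQueryFilter(operand="Status", operator="Equals", values=["Failed"])],
        order_by=[RunQueryOrderBy(order_by="RunEnd", order="DESC")],
    )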
@@ -31585,11 +32749,11 @@ class SalesforceLinkedService(LinkedService):
     :param type: Required. Type of linked service.Constant filled by server.
     :type type: str
     :param connect_via: The integration runtime reference.
-    :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference
+    :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
     :param description: Linked service description.
     :type description: str
     :param parameters: Parameters for linked service.
-    :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification]
+    :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification]
     :param annotations: List of tags that can be used for describing the linked service.
     :type annotations: list[object]
     :param environment_url: The URL of Salesforce instance. Default is
@@ -31601,9 +32765,9 @@ class SalesforceLinkedService(LinkedService):
      (or Expression with resultType string).
     :type username: object
     :param password: The password for Basic authentication of the Salesforce instance.
-    :type password: ~data_factory_management_client.models.SecretBase
+    :type password: ~azure.mgmt.datafactory.models.SecretBase
     :param security_token: The security token is optional to remotely access Salesforce instance.
-    :type security_token: ~data_factory_management_client.models.SecretBase
+    :type security_token: ~azure.mgmt.datafactory.models.SecretBase
     :param api_version: The Salesforce API version used in ADF. Type: string (or Expression with
      resultType string).
     :type api_version: object
@@ -31669,11 +32833,11 @@ class SalesforceMarketingCloudLinkedService(LinkedService):
     :param type: Required. Type of linked service.Constant filled by server.
     :type type: str
     :param connect_via: The integration runtime reference.
-    :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference
+    :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
     :param description: Linked service description.
     :type description: str
     :param parameters: Parameters for linked service.
-    :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification]
+    :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification]
     :param annotations: List of tags that can be used for describing the linked service.
     :type annotations: list[object]
     :param connection_properties: Properties used to connect to Salesforce Marketing Cloud. It is
@@ -31684,7 +32848,7 @@ class SalesforceMarketingCloudLinkedService(LinkedService):
     :type client_id: object
     :param client_secret: The client secret associated with the Salesforce Marketing Cloud
      application. Type: string (or Expression with resultType string).
-    :type client_secret: ~data_factory_management_client.models.SecretBase
+    :type client_secret: ~azure.mgmt.datafactory.models.SecretBase
     :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted
      using HTTPS. The default value is true. Type: boolean (or Expression with resultType boolean).
     :type use_encrypted_endpoints: object
@@ -31769,14 +32933,14 @@ class SalesforceMarketingCloudObjectDataset(Dataset):
      Expression with resultType array), itemType: DatasetSchemaDataElement.
     :type schema: object
     :param linked_service_name: Required. Linked service reference.
-    :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference
+    :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference
     :param parameters: Parameters for dataset.
-    :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification]
+    :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification]
     :param annotations: List of tags that can be used for describing the Dataset.
     :type annotations: list[object]
     :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at
      the root level.
-    :type folder: ~data_factory_management_client.models.DatasetFolder
+    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
     :param table_name: The table name. Type: string (or Expression with resultType string).
     :type table_name: object
     """
@@ -31837,12 +33001,15 @@ class SalesforceMarketingCloudSource(TabularSource):
     :param max_concurrent_connections: The maximum concurrent connection count for the source data
      store. Type: integer (or Expression with resultType integer).
     :type max_concurrent_connections: object
+    :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+     false. Type: boolean (or Expression with resultType boolean).
+    :type disable_metrics_collection: object
     :param query_timeout: Query timeout. Type: string (or Expression with resultType string),
      pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
     :type query_timeout: object
     :param additional_columns: Specifies the additional columns to be added to source data. Type:
-     array of objects (or Expression with resultType array of objects).
-    :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+     array of objects(AdditionalColumns) (or Expression with resultType array of objects).
+    :type additional_columns: object
     :param query: A query to retrieve data from source. Type: string (or Expression with
      resultType string).
     :type query: object
@@ -31858,8 +33025,9 @@ class SalesforceMarketingCloudSource(TabularSource):
         'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
         'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
         'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
-        'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+        'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
         'query': {'key': 'query', 'type': 'object'},
     }
@@ -31870,12 +33038,13 @@ def __init__(
         self,
         *,
         additional_properties: Optional[Dict[str, object]] = None,
         source_retry_count: Optional[object] = None,
         source_retry_wait: Optional[object] = None,
         max_concurrent_connections: Optional[object] = None,
+        disable_metrics_collection: Optional[object] = None,
         query_timeout: Optional[object] = None,
-        additional_columns: Optional[List["AdditionalColumns"]] = None,
+        additional_columns: Optional[object] = None,
         query: Optional[object] = None,
         **kwargs
     ):
-        super(SalesforceMarketingCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+        super(SalesforceMarketingCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
         self.type = 'SalesforceMarketingCloudSource'  # type: str
         self.query = query
@@ -31899,14 +33068,14 @@ class SalesforceObjectDataset(Dataset):
      Expression with resultType array), itemType: DatasetSchemaDataElement.
     :type schema: object
     :param linked_service_name: Required. Linked service reference.
-    :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference
+    :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference
     :param parameters: Parameters for dataset.
-    :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification]
+    :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification]
     :param annotations: List of tags that can be used for describing the Dataset.
     :type annotations: list[object]
     :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at
      the root level.
-    :type folder: ~data_factory_management_client.models.DatasetFolder
+    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
     :param object_api_name: The Salesforce object API name. Type: string (or Expression with
      resultType string).
     :type object_api_name: object
@@ -31960,11 +33129,11 @@ class SalesforceServiceCloudLinkedService(LinkedService):
     :param type: Required. Type of linked service.Constant filled by server.
     :type type: str
     :param connect_via: The integration runtime reference.
-    :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference
+    :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
     :param description: Linked service description.
     :type description: str
     :param parameters: Parameters for linked service.
-    :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification]
+    :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification]
     :param annotations: List of tags that can be used for describing the linked service.
     :type annotations: list[object]
     :param environment_url: The URL of Salesforce Service Cloud instance. Default is
@@ -31976,9 +33145,9 @@ class SalesforceServiceCloudLinkedService(LinkedService):
      (or Expression with resultType string).
     :type username: object
     :param password: The password for Basic authentication of the Salesforce instance.
-    :type password: ~data_factory_management_client.models.SecretBase
+    :type password: ~azure.mgmt.datafactory.models.SecretBase
     :param security_token: The security token is optional to remotely access Salesforce instance.
-    :type security_token: ~data_factory_management_client.models.SecretBase
+    :type security_token: ~azure.mgmt.datafactory.models.SecretBase
     :param api_version: The Salesforce API version used in ADF. Type: string (or Expression with
      resultType string).
     :type api_version: object
@@ -32058,14 +33227,14 @@ class SalesforceServiceCloudObjectDataset(Dataset):
      Expression with resultType array), itemType: DatasetSchemaDataElement.
     :type schema: object
     :param linked_service_name: Required. Linked service reference.
-    :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference
+    :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference
     :param parameters: Parameters for dataset.
-    :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification]
+    :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification]
     :param annotations: List of tags that can be used for describing the Dataset.
     :type annotations: list[object]
     :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at
      the root level.
-    :type folder: ~data_factory_management_client.models.DatasetFolder
+    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
     :param object_api_name: The Salesforce Service Cloud object API name. Type: string (or
      Expression with resultType string).
     :type object_api_name: object
@@ -32133,9 +33302,12 @@ class SalesforceServiceCloudSink(CopySink):
     :param max_concurrent_connections: The maximum concurrent connection count for the sink data
      store. Type: integer (or Expression with resultType integer).
     :type max_concurrent_connections: object
+    :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+     false. Type: boolean (or Expression with resultType boolean).
+    :type disable_metrics_collection: object
     :param write_behavior: The write behavior for the operation. Default is Insert. Possible
      values include: "Insert", "Upsert".
-    :type write_behavior: str or ~data_factory_management_client.models.SalesforceSinkWriteBehavior
+    :type write_behavior: str or ~azure.mgmt.datafactory.models.SalesforceSinkWriteBehavior
     :param external_id_field_name: The name of the external ID field for upsert operation. Default
      value is 'Id' column. Type: string (or Expression with resultType string).
     :type external_id_field_name: object
@@ -32160,6 +33332,7 @@ class SalesforceServiceCloudSink(CopySink):
         'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
         'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
         'write_behavior': {'key': 'writeBehavior', 'type': 'str'},
         'external_id_field_name': {'key': 'externalIdFieldName', 'type': 'object'},
         'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'},
@@ -32174,12 +33347,13 @@ def __init__(
         sink_retry_count: Optional[object] = None,
         sink_retry_wait: Optional[object] = None,
         max_concurrent_connections: Optional[object] = None,
+        disable_metrics_collection: Optional[object] = None,
         write_behavior: Optional[Union[str, "SalesforceSinkWriteBehavior"]] = None,
         external_id_field_name: Optional[object] = None,
         ignore_null_values: Optional[object] = None,
         **kwargs
     ):
-        super(SalesforceServiceCloudSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+        super(SalesforceServiceCloudSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
         self.type = 'SalesforceServiceCloudSink'  # type: str
         self.write_behavior = write_behavior
         self.external_id_field_name = external_id_field_name
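Alongside the new metrics flag, the sink keeps its existing upsert contract: write_behavior selects Insert or Upsert, and external_id_field_name names the Salesforce external ID column used to match records. A hedged sketch under the same import-path assumption; the field name is a placeholder:

    from azure.mgmt.datafactory.models import SalesforceServiceCloudSink

    sink = SalesforceServiceCloudSink(
        write_behavior="Upsert",
        external_id_field_name="External_Id__c",   # placeholder external ID field
        ignore_null_values=False,
        disable_metrics_collection=False,
    )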
@@ -32205,14 +33379,17 @@ class SalesforceServiceCloudSource(CopySource):
     :param max_concurrent_connections: The maximum concurrent connection count for the source data
      store. Type: integer (or Expression with resultType integer).
     :type max_concurrent_connections: object
+    :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+     false. Type: boolean (or Expression with resultType boolean).
+    :type disable_metrics_collection: object
     :param query: Database query. Type: string (or Expression with resultType string).
     :type query: object
     :param read_behavior: The read behavior for the operation. Default is Query. Possible values
      include: "Query", "QueryAll".
-    :type read_behavior: str or ~data_factory_management_client.models.SalesforceSourceReadBehavior
+    :type read_behavior: str or ~azure.mgmt.datafactory.models.SalesforceSourceReadBehavior
     :param additional_columns: Specifies the additional columns to be added to source data. Type:
-     array of objects (or Expression with resultType array of objects).
-    :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+     array of objects(AdditionalColumns) (or Expression with resultType array of objects).
+    :type additional_columns: object
     """
     _validation = {
@@ -32225,9 +33402,10 @@ class SalesforceServiceCloudSource(CopySource):
         'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
         'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
         'query': {'key': 'query', 'type': 'object'},
         'read_behavior': {'key': 'readBehavior', 'type': 'str'},
-        'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+        'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
     }

     def __init__(
@@ -32237,12 +33415,13 @@ def __init__(
         self,
         *,
         additional_properties: Optional[Dict[str, object]] = None,
         source_retry_count: Optional[object] = None,
         source_retry_wait: Optional[object] = None,
         max_concurrent_connections: Optional[object] = None,
+        disable_metrics_collection: Optional[object] = None,
         query: Optional[object] = None,
         read_behavior: Optional[Union[str, "SalesforceSourceReadBehavior"]] = None,
-        additional_columns: Optional[List["AdditionalColumns"]] = None,
+        additional_columns: Optional[object] = None,
         **kwargs
     ):
-        super(SalesforceServiceCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+        super(SalesforceServiceCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
         self.type = 'SalesforceServiceCloudSource'  # type: str
         self.query = query
         self.read_behavior = read_behavior
@@ -32274,9 +33453,12 @@ class SalesforceSink(CopySink):
     :param max_concurrent_connections: The maximum concurrent connection count for the sink data
      store. Type: integer (or Expression with resultType integer).
     :type max_concurrent_connections: object
+    :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+     false. Type: boolean (or Expression with resultType boolean).
+    :type disable_metrics_collection: object
     :param write_behavior: The write behavior for the operation. Default is Insert. Possible
      values include: "Insert", "Upsert".
-    :type write_behavior: str or ~data_factory_management_client.models.SalesforceSinkWriteBehavior
+    :type write_behavior: str or ~azure.mgmt.datafactory.models.SalesforceSinkWriteBehavior
     :param external_id_field_name: The name of the external ID field for upsert operation. Default
      value is 'Id' column. Type: string (or Expression with resultType string).
     :type external_id_field_name: object
@@ -32301,6 +33483,7 @@ class SalesforceSink(CopySink):
         'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
         'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
         'write_behavior': {'key': 'writeBehavior', 'type': 'str'},
         'external_id_field_name': {'key': 'externalIdFieldName', 'type': 'object'},
         'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'},
@@ -32315,12 +33498,13 @@ def __init__(
         sink_retry_count: Optional[object] = None,
         sink_retry_wait: Optional[object] = None,
         max_concurrent_connections: Optional[object] = None,
+        disable_metrics_collection: Optional[object] = None,
         write_behavior: Optional[Union[str, "SalesforceSinkWriteBehavior"]] = None,
         external_id_field_name: Optional[object] = None,
         ignore_null_values: Optional[object] = None,
         **kwargs
     ):
-        super(SalesforceSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+        super(SalesforceSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
         self.type = 'SalesforceSink'  # type: str
         self.write_behavior = write_behavior
         self.external_id_field_name = external_id_field_name
@@ -32346,17 +33530,20 @@ class SalesforceSource(TabularSource):
     :param max_concurrent_connections: The maximum concurrent connection count for the source data
      store. Type: integer (or Expression with resultType integer).
     :type max_concurrent_connections: object
+    :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+     false. Type: boolean (or Expression with resultType boolean).
+    :type disable_metrics_collection: object
     :param query_timeout: Query timeout. Type: string (or Expression with resultType string),
      pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
     :type query_timeout: object
     :param additional_columns: Specifies the additional columns to be added to source data. Type:
-     array of objects (or Expression with resultType array of objects).
-    :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+     array of objects(AdditionalColumns) (or Expression with resultType array of objects).
+    :type additional_columns: object
     :param query: Database query. Type: string (or Expression with resultType string).
     :type query: object
     :param read_behavior: The read behavior for the operation. Default is Query. Possible values
      include: "Query", "QueryAll".
-    :type read_behavior: str or ~data_factory_management_client.models.SalesforceSourceReadBehavior
+    :type read_behavior: str or ~azure.mgmt.datafactory.models.SalesforceSourceReadBehavior
     """
     _validation = {
@@ -32369,8 +33556,9 @@ class SalesforceSource(TabularSource):
         'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
         'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
         'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
-        'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+        'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
         'query': {'key': 'query', 'type': 'object'},
         'read_behavior': {'key': 'readBehavior', 'type': 'str'},
     }
@@ -32382,13 +33570,14 @@ def __init__(
         self,
         *,
         additional_properties: Optional[Dict[str, object]] = None,
         source_retry_count: Optional[object] = None,
         source_retry_wait: Optional[object] = None,
         max_concurrent_connections: Optional[object] = None,
+        disable_metrics_collection: Optional[object] = None,
         query_timeout: Optional[object] = None,
-        additional_columns: Optional[List["AdditionalColumns"]] = None,
+        additional_columns: Optional[object] = None,
         query: Optional[object] = None,
         read_behavior: Optional[Union[str, "SalesforceSourceReadBehavior"]] = None,
         **kwargs
     ):
-        super(SalesforceSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+        super(SalesforceSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
         self.type = 'SalesforceSource'  # type: str
         self.query = query
         self.read_behavior = read_behavior
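On the source side, read_behavior stays a string/enum ("Query" reads live records, while "QueryAll" also includes soft-deleted ones), and additional_columns drops to a plain object like the other TabularSource subclasses in this diff. A sketch under the same import-path assumption, with an illustrative SOQL query:

    from azure.mgmt.datafactory.models import SalesforceSource

    source = SalesforceSource(
        query="SELECT Id, Name FROM Account",   # illustrative SOQL
        read_behavior="QueryAll",               # include soft-deleted records
        disable_metrics_collection=True,
        additional_columns=[{"name": "ingest_source", "value": "salesforce"}],
    )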
- :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param server: Required. Host name of the SAP BW instance. Type: string (or Expression with @@ -32488,7 +33677,7 @@ class SapBwLinkedService(LinkedService): resultType string). :type user_name: object :param password: Password to access the SAP BW server. - :type password: ~data_factory_management_client.models.SecretBase + :type password: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -32562,12 +33751,15 @@ class SapBwSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: MDX query. Type: string (or Expression with resultType string). 
:type query: object """ @@ -32582,8 +33774,9 @@ class SapBwSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -32594,12 +33787,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, query: Optional[object] = None, **kwargs ): - super(SapBwSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(SapBwSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'SapBwSource' # type: str self.query = query @@ -32615,11 +33809,11 @@ class SapCloudForCustomerLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param url: Required. The URL of SAP Cloud for Customer OData API. For example, @@ -32630,7 +33824,7 @@ class SapCloudForCustomerLinkedService(LinkedService): resultType string). :type username: object :param password: The password for Basic authentication. - :type password: ~data_factory_management_client.models.SecretBase + :type password: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Either encryptedCredential or username/password must be provided. Type: string (or Expression with resultType string). @@ -32696,14 +33890,14 @@ class SapCloudForCustomerResourceDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. 
- :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param path: Required. The path of the SAP Cloud for Customer OData entity. Type: string (or Expression with resultType string). :type path: object @@ -32772,10 +33966,13 @@ class SapCloudForCustomerSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param write_behavior: The write behavior for the operation. Default is 'Insert'. Possible values include: "Insert", "Update". :type write_behavior: str or - ~data_factory_management_client.models.SapCloudForCustomerSinkWriteBehavior + ~azure.mgmt.datafactory.models.SapCloudForCustomerSinkWriteBehavior :param http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:05:00. 
Type: string (or Expression with resultType string), pattern: @@ -32795,6 +33992,7 @@ class SapCloudForCustomerSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, } @@ -32808,11 +34006,12 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, write_behavior: Optional[Union[str, "SapCloudForCustomerSinkWriteBehavior"]] = None, http_request_timeout: Optional[object] = None, **kwargs ): - super(SapCloudForCustomerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(SapCloudForCustomerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'SapCloudForCustomerSink' # type: str self.write_behavior = write_behavior self.http_request_timeout = http_request_timeout @@ -32837,12 +34036,15 @@ class SapCloudForCustomerSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: SAP Cloud for Customer OData query. For example, "$top=1". Type: string (or Expression with resultType string). 
:type query: object @@ -32863,8 +34065,9 @@ class SapCloudForCustomerSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, } @@ -32876,13 +34079,14 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, query: Optional[object] = None, http_request_timeout: Optional[object] = None, **kwargs ): - super(SapCloudForCustomerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(SapCloudForCustomerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'SapCloudForCustomerSource' # type: str self.query = query self.http_request_timeout = http_request_timeout @@ -32899,11 +34103,11 @@ class SapEccLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param url: Required. The URL of SAP ECC OData API. For example, @@ -32914,7 +34118,7 @@ class SapEccLinkedService(LinkedService): resultType string). :type username: str :param password: The password for Basic authentication. - :type password: ~data_factory_management_client.models.SecretBase + :type password: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Either encryptedCredential or username/password must be provided. Type: string (or Expression with resultType string). 
@@ -32980,14 +34184,14 @@ class SapEccResourceDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param path: Required. The path of the SAP ECC OData entity. Type: string (or Expression with resultType string). :type path: object @@ -33050,12 +34254,15 @@ class SapEccSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: SAP ECC OData query. For example, "$top=1". Type: string (or Expression with resultType string). 
:type query: object @@ -33076,8 +34283,9 @@ class SapEccSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, } @@ -33089,13 +34297,14 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, query: Optional[object] = None, http_request_timeout: Optional[object] = None, **kwargs ): - super(SapEccSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(SapEccSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'SapEccSource' # type: str self.query = query self.http_request_timeout = http_request_timeout @@ -33112,11 +34321,11 @@ class SapHanaLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param connection_string: SAP HANA ODBC connection string. Type: string, SecureString or @@ -33127,13 +34336,12 @@ class SapHanaLinkedService(LinkedService): :type server: object :param authentication_type: The authentication type to be used to connect to the SAP HANA server. Possible values include: "Basic", "Windows". - :type authentication_type: str or - ~data_factory_management_client.models.SapHanaAuthenticationType + :type authentication_type: str or ~azure.mgmt.datafactory.models.SapHanaAuthenticationType :param user_name: Username to access the SAP HANA server. Type: string (or Expression with resultType string). :type user_name: object :param password: Password to access the SAP HANA server. 
-    :type password: ~data_factory_management_client.models.SecretBase
+    :type password: ~azure.mgmt.datafactory.models.SecretBase
     :param encrypted_credential: The encrypted credential used for authentication. Credentials are
      encrypted using the integration runtime credential manager. Type: string (or Expression with
      resultType string).
@@ -33226,12 +34434,15 @@ class SapHanaSource(TabularSource):
     :param max_concurrent_connections: The maximum concurrent connection count for the source data
      store. Type: integer (or Expression with resultType integer).
     :type max_concurrent_connections: object
+    :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+     false. Type: boolean (or Expression with resultType boolean).
+    :type disable_metrics_collection: object
     :param query_timeout: Query timeout. Type: string (or Expression with resultType string),
      pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
     :type query_timeout: object
     :param additional_columns: Specifies the additional columns to be added to source data. Type:
-     array of objects (or Expression with resultType array of objects).
-    :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+     array of objects(AdditionalColumns) (or Expression with resultType array of objects).
+    :type additional_columns: object
     :param query: SAP HANA Sql query. Type: string (or Expression with resultType string).
     :type query: object
     :param packet_size: The packet size of data read from SAP HANA. Type: integer(or Expression
@@ -33242,7 +34453,7 @@
     :type partition_option: object
     :param partition_settings: The settings that will be leveraged for SAP HANA source
      partitioning.
-    :type partition_settings: ~data_factory_management_client.models.SapHanaPartitionSettings
+    :type partition_settings: ~azure.mgmt.datafactory.models.SapHanaPartitionSettings
     """

     _validation = {
@@ -33255,8 +34466,9 @@ class SapHanaSource(TabularSource):
         'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
         'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
         'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
-        'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+        'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
         'query': {'key': 'query', 'type': 'object'},
         'packet_size': {'key': 'packetSize', 'type': 'object'},
         'partition_option': {'key': 'partitionOption', 'type': 'object'},
@@ -33270,15 +34482,16 @@ def __init__(
         self,
         *,
         additional_properties: Optional[Dict[str, object]] = None,
         source_retry_count: Optional[object] = None,
         source_retry_wait: Optional[object] = None,
         max_concurrent_connections: Optional[object] = None,
+        disable_metrics_collection: Optional[object] = None,
         query_timeout: Optional[object] = None,
-        additional_columns: Optional[List["AdditionalColumns"]] = None,
+        additional_columns: Optional[object] = None,
         query: Optional[object] = None,
         packet_size: Optional[object] = None,
         partition_option: Optional[object] = None,
         partition_settings: Optional["SapHanaPartitionSettings"] = None,
         **kwargs
     ):
-        super(SapHanaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+        super(SapHanaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
         self.type = 'SapHanaSource'  # type: str
         self.query = query
         self.packet_size = packet_size
@@ -33305,14 +34518,14 @@ class SapHanaTableDataset(Dataset):
     Expression with resultType array), itemType: DatasetSchemaDataElement.
     :type schema: object
     :param linked_service_name: Required. Linked service reference.
-    :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference
+    :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference
     :param parameters: Parameters for dataset.
-    :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification]
+    :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification]
     :param annotations: List of tags that can be used for describing the Dataset.
     :type annotations: list[object]
     :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the
      root level.
-    :type folder: ~data_factory_management_client.models.DatasetFolder
+    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
     :param schema_type_properties_schema: The schema name of SAP HANA. Type: string (or Expression
      with resultType string).
     :type schema_type_properties_schema: object
@@ -33371,11 +34584,11 @@ class SapOpenHubLinkedService(LinkedService):
     :param type: Required. Type of linked service.Constant filled by server.
     :type type: str
     :param connect_via: The integration runtime reference.
-    :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference
+    :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
     :param description: Linked service description.
     :type description: str
     :param parameters: Parameters for linked service.
-    :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification]
+    :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification]
     :param annotations: List of tags that can be used for describing the linked service.
     :type annotations: list[object]
     :param server: Host name of the SAP BW instance where the open hub destination is located.
@@ -33400,7 +34613,7 @@
     :type user_name: object
     :param password: Password to access the SAP BW server where the open hub destination is
      located.
-    :type password: ~data_factory_management_client.models.SecretBase
+    :type password: ~azure.mgmt.datafactory.models.SecretBase
     :param message_server: The hostname of the SAP Message Server. Type: string (or Expression
      with resultType string).
     :type message_server: object
@@ -33495,12 +34708,15 @@ class SapOpenHubSource(TabularSource):
     :param max_concurrent_connections: The maximum concurrent connection count for the source data
      store. Type: integer (or Expression with resultType integer).
     :type max_concurrent_connections: object
+    :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+     false. Type: boolean (or Expression with resultType boolean).
+    :type disable_metrics_collection: object
     :param query_timeout: Query timeout. Type: string (or Expression with resultType string),
      pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param exclude_last_request: Whether to exclude the records of the last request. The default value is true. Type: boolean (or Expression with resultType boolean). :type exclude_last_request: object @@ -33527,8 +34743,9 @@ class SapOpenHubSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'exclude_last_request': {'key': 'excludeLastRequest', 'type': 'object'}, 'base_request_id': {'key': 'baseRequestId', 'type': 'object'}, 'custom_rfc_read_table_function_module': {'key': 'customRfcReadTableFunctionModule', 'type': 'object'}, @@ -33542,15 +34759,16 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, exclude_last_request: Optional[object] = None, base_request_id: Optional[object] = None, custom_rfc_read_table_function_module: Optional[object] = None, sap_data_column_delimiter: Optional[object] = None, **kwargs ): - super(SapOpenHubSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(SapOpenHubSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'SapOpenHubSource' # type: str self.exclude_last_request = exclude_last_request self.base_request_id = base_request_id @@ -33577,14 +34795,14 @@ class SapOpenHubTableDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. 
:type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param open_hub_destination_name: Required. The name of the Open Hub Destination with destination type as Database Table. Type: string (or Expression with resultType string). :type open_hub_destination_name: object @@ -33652,11 +34870,11 @@ class SapTableLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param server: Host name of the SAP instance where the table is located. Type: string (or @@ -33680,7 +34898,7 @@ class SapTableLinkedService(LinkedService): (or Expression with resultType string). :type user_name: object :param password: Password to access the SAP server where the table is located. - :type password: ~data_factory_management_client.models.SecretBase + :type password: ~azure.mgmt.datafactory.models.SecretBase :param message_server: The hostname of the SAP Message Server. Type: string (or Expression with resultType string). :type message_server: object @@ -33847,14 +35065,14 @@ class SapTableResourceDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: Required. The name of the SAP Table. Type: string (or Expression with resultType string). :type table_name: object @@ -33917,12 +35135,15 @@ class SapTableSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. 
Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param row_count: The number of rows to be retrieved. Type: integer(or Expression with resultType integer). :type row_count: object @@ -33951,7 +35172,7 @@ class SapTableSource(TabularSource): :type partition_option: object :param partition_settings: The settings that will be leveraged for SAP table source partitioning. - :type partition_settings: ~data_factory_management_client.models.SapTablePartitionSettings + :type partition_settings: ~azure.mgmt.datafactory.models.SapTablePartitionSettings """ _validation = { @@ -33964,8 +35185,9 @@ class SapTableSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'row_count': {'key': 'rowCount', 'type': 'object'}, 'row_skips': {'key': 'rowSkips', 'type': 'object'}, 'rfc_table_fields': {'key': 'rfcTableFields', 'type': 'object'}, @@ -33984,8 +35206,9 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, row_count: Optional[object] = None, row_skips: Optional[object] = None, rfc_table_fields: Optional[object] = None, @@ -33997,7 +35220,7 @@ def __init__( partition_settings: Optional["SapTablePartitionSettings"] = None, **kwargs ): - super(SapTableSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(SapTableSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'SapTableSource' # type: str self.row_count = row_count self.row_skips = row_skips @@ -34026,13 +35249,13 @@ class ScheduleTrigger(MultiplePipelineTrigger): :type description: str :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". 
- :vartype runtime_state: str or ~data_factory_management_client.models.TriggerRuntimeState + :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState :param annotations: List of tags that can be used for describing the trigger. :type annotations: list[object] :param pipelines: Pipelines that need to be started. - :type pipelines: list[~data_factory_management_client.models.TriggerPipelineReference] + :type pipelines: list[~azure.mgmt.datafactory.models.TriggerPipelineReference] :param recurrence: Required. Recurrence schedule configuration. - :type recurrence: ~data_factory_management_client.models.ScheduleTriggerRecurrence + :type recurrence: ~azure.mgmt.datafactory.models.ScheduleTriggerRecurrence """ _validation = { @@ -34074,7 +35297,7 @@ class ScheduleTriggerRecurrence(msrest.serialization.Model): :type additional_properties: dict[str, object] :param frequency: The frequency. Possible values include: "NotSpecified", "Minute", "Hour", "Day", "Week", "Month", "Year". - :type frequency: str or ~data_factory_management_client.models.RecurrenceFrequency + :type frequency: str or ~azure.mgmt.datafactory.models.RecurrenceFrequency :param interval: The interval. :type interval: int :param start_time: The start time. @@ -34084,7 +35307,7 @@ class ScheduleTriggerRecurrence(msrest.serialization.Model): :param time_zone: The time zone. :type time_zone: str :param schedule: The recurrence schedule. - :type schedule: ~data_factory_management_client.models.RecurrenceSchedule + :type schedule: ~azure.mgmt.datafactory.models.RecurrenceSchedule """ _attribute_map = { @@ -34128,9 +35351,8 @@ class ScriptAction(msrest.serialization.Model): :type name: str :param uri: Required. The URI for the script action. :type uri: str - :param roles: Required. The node types on which the script action should be executed. Possible - values include: "Headnode", "Workernode", "Zookeeper". - :type roles: str or ~data_factory_management_client.models.HdiNodeTypes + :param roles: Required. The node types on which the script action should be executed. + :type roles: str :param parameters: The parameters for the script action. :type parameters: str """ @@ -34153,7 +35375,7 @@ def __init__( *, name: str, uri: str, - roles: Union[str, "HdiNodeTypes"], + roles: str, parameters: Optional[str] = None, **kwargs ): @@ -34246,11 +35468,11 @@ class SelfHostedIntegrationRuntime(IntegrationRuntime): :type additional_properties: dict[str, object] :param type: Required. Type of integration runtime.Constant filled by server. Possible values include: "Managed", "SelfHosted". - :type type: str or ~data_factory_management_client.models.IntegrationRuntimeType + :type type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeType :param description: Integration runtime description. :type description: str :param linked_info: The base definition of a linked integration runtime. - :type linked_info: ~data_factory_management_client.models.LinkedIntegrationRuntimeType + :type linked_info: ~azure.mgmt.datafactory.models.LinkedIntegrationRuntimeType """ _validation = { @@ -34294,8 +35516,7 @@ class SelfHostedIntegrationRuntimeNode(msrest.serialization.Model): :ivar status: Status of the integration runtime node. Possible values include: "NeedRegistration", "Online", "Limited", "Offline", "Upgrading", "Initializing", "InitializeFailed". 
- :vartype status: str or - ~data_factory_management_client.models.SelfHostedIntegrationRuntimeNodeStatus + :vartype status: str or ~azure.mgmt.datafactory.models.SelfHostedIntegrationRuntimeNodeStatus :ivar capabilities: The integration runtime capabilities dictionary. :vartype capabilities: dict[str, str] :ivar version_status: Status of the integration runtime node version. @@ -34317,7 +35538,7 @@ class SelfHostedIntegrationRuntimeNode(msrest.serialization.Model): :ivar last_update_result: The result of the last integration runtime node update. Possible values include: "None", "Succeed", "Fail". :vartype last_update_result: str or - ~data_factory_management_client.models.IntegrationRuntimeUpdateResult + ~azure.mgmt.datafactory.models.IntegrationRuntimeUpdateResult :ivar last_start_update_time: The last time for the integration runtime node update start. :vartype last_start_update_time: ~datetime.datetime :ivar last_end_update_time: The last time for the integration runtime node update end. @@ -34414,13 +35635,13 @@ class SelfHostedIntegrationRuntimeStatus(IntegrationRuntimeStatus): :type additional_properties: dict[str, object] :param type: Required. Type of integration runtime.Constant filled by server. Possible values include: "Managed", "SelfHosted". - :type type: str or ~data_factory_management_client.models.IntegrationRuntimeType + :type type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeType :ivar data_factory_name: The data factory name which the integration runtime belong to. :vartype data_factory_name: str :ivar state: The state of integration runtime. Possible values include: "Initial", "Stopped", "Started", "Starting", "Stopping", "NeedRegistration", "Online", "Limited", "Offline", "AccessDenied". - :vartype state: str or ~data_factory_management_client.models.IntegrationRuntimeState + :vartype state: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeState :ivar create_time: The time at which the integration runtime was created, in ISO8601 format. :vartype create_time: ~datetime.datetime :ivar task_queue_id: The task queue id of the integration runtime. @@ -34429,11 +35650,11 @@ class SelfHostedIntegrationRuntimeStatus(IntegrationRuntimeStatus): communication channel (when more than 2 self-hosted integration runtime nodes exist). Possible values include: "NotSet", "SslEncrypted", "NotEncrypted". :vartype internal_channel_encryption: str or - ~data_factory_management_client.models.IntegrationRuntimeInternalChannelEncryptionMode + ~azure.mgmt.datafactory.models.IntegrationRuntimeInternalChannelEncryptionMode :ivar version: Version of the integration runtime. :vartype version: str :param nodes: The list of nodes for this integration runtime. - :type nodes: list[~data_factory_management_client.models.SelfHostedIntegrationRuntimeNode] + :type nodes: list[~azure.mgmt.datafactory.models.SelfHostedIntegrationRuntimeNode] :ivar scheduled_update_date: The date at which the integration runtime will be scheduled to update, in ISO8601 format. :vartype scheduled_update_date: ~datetime.datetime @@ -34448,13 +35669,12 @@ class SelfHostedIntegrationRuntimeStatus(IntegrationRuntimeStatus): :vartype service_urls: list[str] :ivar auto_update: Whether Self-hosted integration runtime auto update has been turned on. Possible values include: "On", "Off". 
- :vartype auto_update: str or - ~data_factory_management_client.models.IntegrationRuntimeAutoUpdate + :vartype auto_update: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeAutoUpdate :ivar version_status: Status of the integration runtime version. :vartype version_status: str :param links: The list of linked integration runtimes that are created to share with this integration runtime. - :type links: list[~data_factory_management_client.models.LinkedIntegrationRuntime] + :type links: list[~azure.mgmt.datafactory.models.LinkedIntegrationRuntime] :ivar pushed_version: The version that the integration runtime is going to update to. :vartype pushed_version: str :ivar latest_version: The latest version on download center. @@ -34546,11 +35766,11 @@ class ServiceNowLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param endpoint: Required. The endpoint of the ServiceNow server. (i.e. @@ -34558,18 +35778,17 @@ class ServiceNowLinkedService(LinkedService): :type endpoint: object :param authentication_type: Required. The authentication type to use. Possible values include: "Basic", "OAuth2". - :type authentication_type: str or - ~data_factory_management_client.models.ServiceNowAuthenticationType + :type authentication_type: str or ~azure.mgmt.datafactory.models.ServiceNowAuthenticationType :param username: The user name used to connect to the ServiceNow server for Basic and OAuth2 authentication. :type username: object :param password: The password corresponding to the user name for Basic and OAuth2 authentication. - :type password: ~data_factory_management_client.models.SecretBase + :type password: ~azure.mgmt.datafactory.models.SecretBase :param client_id: The client id for OAuth2 authentication. :type client_id: object :param client_secret: The client secret for OAuth2 authentication. - :type client_secret: ~data_factory_management_client.models.SecretBase + :type client_secret: ~azure.mgmt.datafactory.models.SecretBase :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. :type use_encrypted_endpoints: object @@ -34664,14 +35883,14 @@ class ServiceNowObjectDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. 
:type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The table name. Type: string (or Expression with resultType string). :type table_name: object """ @@ -34732,12 +35951,15 @@ class ServiceNowSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -34753,8 +35975,9 @@ class ServiceNowSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -34765,16 +35988,71 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, query: Optional[object] = None, **kwargs ): - super(ServiceNowSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(ServiceNowSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'ServiceNowSource' # type: str self.query = query +class ServicePrincipalCredential(Credential): + """Service principal credential. + + All required parameters must be populated in order to send to Azure. 
+ + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of credential.Constant filled by server. + :type type: str + :param description: Credential description. + :type description: str + :param annotations: List of tags that can be used for describing the Credential. + :type annotations: list[object] + :param service_principal_id: The app ID of the service principal used to authenticate. + :type service_principal_id: object + :param service_principal_key: The key of the service principal used to authenticate. + :type service_principal_key: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param tenant: The ID of the tenant to which the service principal belongs. + :type tenant: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'AzureKeyVaultSecretReference'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + annotations: Optional[List[object]] = None, + service_principal_id: Optional[object] = None, + service_principal_key: Optional["AzureKeyVaultSecretReference"] = None, + tenant: Optional[object] = None, + **kwargs + ): + super(ServicePrincipalCredential, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, **kwargs) + self.type = 'ServicePrincipal' # type: str + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.tenant = tenant + + class SetVariableActivity(Activity): """Set value for a Variable. @@ -34790,9 +36068,9 @@ class SetVariableActivity(Activity): :param description: Activity description. :type description: str :param depends_on: Activity depends on condition. - :type depends_on: list[~data_factory_management_client.models.ActivityDependency] + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. - :type user_properties: list[~data_factory_management_client.models.UserProperty] + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] :param variable_name: Name of the variable whose value needs to be set. :type variable_name: str :param value: Value to be set. Could be a static value or Expression. @@ -34887,6 +36165,9 @@ class SftpReadSettings(StoreReadSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). 
:type recursive: object @@ -34924,6 +36205,7 @@ class SftpReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -34940,6 +36222,7 @@ def __init__( *, additional_properties: Optional[Dict[str, object]] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, recursive: Optional[object] = None, wildcard_folder_path: Optional[object] = None, wildcard_file_name: Optional[object] = None, @@ -34951,7 +36234,7 @@ def __init__( modified_datetime_end: Optional[object] = None, **kwargs ): - super(SftpReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(SftpReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'SftpReadSettings' # type: str self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path @@ -34975,11 +36258,11 @@ class SftpServerLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param host: Required. The SFTP server host name. Type: string (or Expression with resultType @@ -34990,12 +36273,12 @@ class SftpServerLinkedService(LinkedService): :type port: object :param authentication_type: The authentication type to be used to connect to the FTP server. Possible values include: "Basic", "SshPublicKey", "MultiFactor". - :type authentication_type: str or ~data_factory_management_client.models.SftpAuthenticationType + :type authentication_type: str or ~azure.mgmt.datafactory.models.SftpAuthenticationType :param user_name: The username used to log on to the SFTP server. Type: string (or Expression with resultType string). :type user_name: object :param password: Password to logon the SFTP server for Basic authentication. - :type password: ~data_factory_management_client.models.SecretBase + :type password: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -35008,10 +36291,10 @@ class SftpServerLinkedService(LinkedService): :param private_key_content: Base64 encoded SSH private key content for SshPublicKey authentication. 
For on-premises copy with SshPublicKey authentication, either PrivateKeyPath or PrivateKeyContent should be specified. SSH private key should be OpenSSH format. - :type private_key_content: ~data_factory_management_client.models.SecretBase + :type private_key_content: ~azure.mgmt.datafactory.models.SecretBase :param pass_phrase: The password to decrypt the SSH private key if the SSH private key is encrypted. - :type pass_phrase: ~data_factory_management_client.models.SecretBase + :type pass_phrase: ~azure.mgmt.datafactory.models.SecretBase :param skip_host_key_validation: If true, skip the SSH host key validation. Default value is false. Type: boolean (or Expression with resultType boolean). :type skip_host_key_validation: object @@ -35095,6 +36378,9 @@ class SftpWriteSettings(StoreWriteSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param copy_behavior: The type of copy behavior for copy sink. :type copy_behavior: object :param operation_timeout: Specifies the timeout for writing each chunk to SFTP server. Default @@ -35114,6 +36400,7 @@ class SftpWriteSettings(StoreWriteSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, 'operation_timeout': {'key': 'operationTimeout', 'type': 'object'}, 'use_temp_file_rename': {'key': 'useTempFileRename', 'type': 'object'}, @@ -35124,12 +36411,13 @@ def __init__( *, additional_properties: Optional[Dict[str, object]] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, copy_behavior: Optional[object] = None, operation_timeout: Optional[object] = None, use_temp_file_rename: Optional[object] = None, **kwargs ): - super(SftpWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) + super(SftpWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, copy_behavior=copy_behavior, **kwargs) self.type = 'SftpWriteSettings' # type: str self.operation_timeout = operation_timeout self.use_temp_file_rename = use_temp_file_rename @@ -35146,11 +36434,11 @@ class SharePointOnlineListLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. 
- :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param site_url: Required. The URL of the SharePoint Online site. For example, @@ -35167,7 +36455,7 @@ class SharePointOnlineListLinkedService(LinkedService): :type service_principal_id: object :param service_principal_key: Required. The client secret of your application registered in Azure Active Directory. Type: string (or Expression with resultType string). - :type service_principal_key: ~data_factory_management_client.models.SecretBase + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -35239,14 +36527,14 @@ class SharePointOnlineListResourceDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param list_name: The name of the SharePoint Online list. Type: string (or Expression with resultType string). :type list_name: object @@ -35308,6 +36596,9 @@ class SharePointOnlineListSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query: The OData query to filter the data in SharePoint Online list. For example, "$top=1". Type: string (or Expression with resultType string). 
:type query: object @@ -35327,6 +36618,7 @@ class SharePointOnlineListSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, } @@ -35338,11 +36630,12 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query: Optional[object] = None, http_request_timeout: Optional[object] = None, **kwargs ): - super(SharePointOnlineListSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(SharePointOnlineListSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'SharePointOnlineListSource' # type: str self.query = query self.http_request_timeout = http_request_timeout @@ -35359,18 +36652,18 @@ class ShopifyLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param host: Required. The endpoint of the Shopify server. (i.e. mystore.myshopify.com). :type host: object :param access_token: The API access token that can be used to access Shopify’s data. The token won't expire if it is offline mode. - :type access_token: ~data_factory_management_client.models.SecretBase + :type access_token: ~azure.mgmt.datafactory.models.SecretBase :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. :type use_encrypted_endpoints: object @@ -35452,14 +36745,14 @@ class ShopifyObjectDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. 
:type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The table name. Type: string (or Expression with resultType string). :type table_name: object """ @@ -35520,12 +36813,15 @@ class ShopifySource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -35541,8 +36837,9 @@ class ShopifySource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -35553,12 +36850,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, query: Optional[object] = None, **kwargs ): - super(ShopifySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(ShopifySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'ShopifySource' # type: str self.query = query @@ -35610,14 +36908,14 @@ class SnowflakeDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. 
- :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param schema_type_properties_schema: The schema name of the Snowflake database. Type: string (or Expression with resultType string). :type schema_type_properties_schema: object @@ -35771,18 +37069,18 @@ class SnowflakeLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param connection_string: Required. The connection string of snowflake. Type: string, SecureString. :type connection_string: object :param password: The Azure key vault secret reference of password in connection string. - :type password: ~data_factory_management_client.models.AzureKeyVaultSecretReference + :type password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -35851,11 +37149,14 @@ class SnowflakeSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType string). :type pre_copy_script: object :param import_settings: Snowflake import settings. 
- :type import_settings: ~data_factory_management_client.models.SnowflakeImportCopyCommand + :type import_settings: ~azure.mgmt.datafactory.models.SnowflakeImportCopyCommand """ _validation = { @@ -35870,6 +37171,7 @@ class SnowflakeSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, 'import_settings': {'key': 'importSettings', 'type': 'SnowflakeImportCopyCommand'}, } @@ -35883,11 +37185,12 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, pre_copy_script: Optional[object] = None, import_settings: Optional["SnowflakeImportCopyCommand"] = None, **kwargs ): - super(SnowflakeSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(SnowflakeSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'SnowflakeSink' # type: str self.pre_copy_script = pre_copy_script self.import_settings = import_settings @@ -35912,10 +37215,13 @@ class SnowflakeSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query: Snowflake Sql query. Type: string (or Expression with resultType string). :type query: object :param export_settings: Snowflake export settings. 
- :type export_settings: ~data_factory_management_client.models.SnowflakeExportCopyCommand + :type export_settings: ~azure.mgmt.datafactory.models.SnowflakeExportCopyCommand """ _validation = { @@ -35928,6 +37234,7 @@ class SnowflakeSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'export_settings': {'key': 'exportSettings', 'type': 'SnowflakeExportCopyCommand'}, } @@ -35939,11 +37246,12 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query: Optional[object] = None, export_settings: Optional["SnowflakeExportCopyCommand"] = None, **kwargs ): - super(SnowflakeSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(SnowflakeSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'SnowflakeSource' # type: str self.query = query self.export_settings = export_settings @@ -35960,11 +37268,11 @@ class SparkLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param host: Required. IP address or host name of the Spark server. @@ -35974,21 +37282,20 @@ class SparkLinkedService(LinkedService): :type port: object :param server_type: The type of Spark server. Possible values include: "SharkServer", "SharkServer2", "SparkThriftServer". - :type server_type: str or ~data_factory_management_client.models.SparkServerType + :type server_type: str or ~azure.mgmt.datafactory.models.SparkServerType :param thrift_transport_protocol: The transport protocol to use in the Thrift layer. Possible values include: "Binary", "SASL", "HTTP ". :type thrift_transport_protocol: str or - ~data_factory_management_client.models.SparkThriftTransportProtocol + ~azure.mgmt.datafactory.models.SparkThriftTransportProtocol :param authentication_type: Required. The authentication method used to access the Spark server. Possible values include: "Anonymous", "Username", "UsernameAndPassword", "WindowsAzureHDInsightService". 
- :type authentication_type: str or - ~data_factory_management_client.models.SparkAuthenticationType + :type authentication_type: str or ~azure.mgmt.datafactory.models.SparkAuthenticationType :param username: The user name that you use to access Spark Server. :type username: object :param password: The password corresponding to the user name that you provided in the Username field. - :type password: ~data_factory_management_client.models.SecretBase + :type password: ~azure.mgmt.datafactory.models.SecretBase :param http_path: The partial URL corresponding to the Spark server. :type http_path: object :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The @@ -36104,14 +37411,14 @@ class SparkObjectDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: This property will be retired. Please consider using schema + table properties instead. :type table_name: object @@ -36184,12 +37491,15 @@ class SparkSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
:type query: object @@ -36205,8 +37515,9 @@ class SparkSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -36217,12 +37528,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, query: Optional[object] = None, **kwargs ): - super(SparkSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(SparkSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'SparkSource' # type: str self.query = query @@ -36236,13 +37548,13 @@ class SqlAlwaysEncryptedProperties(msrest.serialization.Model): Type: string (or Expression with resultType string). Possible values include: "ServicePrincipal", "ManagedIdentity". :type always_encrypted_akv_auth_type: str or - ~data_factory_management_client.models.SqlAlwaysEncryptedAkvAuthType + ~azure.mgmt.datafactory.models.SqlAlwaysEncryptedAkvAuthType :param service_principal_id: The client ID of the application in Azure Active Directory used for Azure Key Vault authentication. Type: string (or Expression with resultType string). :type service_principal_id: object :param service_principal_key: The key of the service principal used to authenticate against Azure Key Vault. - :type service_principal_key: ~data_factory_management_client.models.SecretBase + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase """ _validation = { @@ -36294,6 +37606,9 @@ class SqlDwSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType string). :type pre_copy_script: object @@ -36301,16 +37616,24 @@ class SqlDwSink(CopySink): applicable. Type: boolean (or Expression with resultType boolean). :type allow_poly_base: object :param poly_base_settings: Specifies PolyBase-related settings when allowPolyBase is true. 
- :type poly_base_settings: ~data_factory_management_client.models.PolybaseSettings + :type poly_base_settings: ~azure.mgmt.datafactory.models.PolybaseSettings :param allow_copy_command: Indicates to use Copy Command to copy data into SQL Data Warehouse. Type: boolean (or Expression with resultType boolean). :type allow_copy_command: object :param copy_command_settings: Specifies Copy Command related settings when allowCopyCommand is true. - :type copy_command_settings: ~data_factory_management_client.models.DwCopyCommandSettings + :type copy_command_settings: ~azure.mgmt.datafactory.models.DwCopyCommandSettings :param table_option: The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string (or Expression with resultType string). :type table_option: object + :param sql_writer_use_table_lock: Whether to use table lock during bulk copy. Type: boolean (or + Expression with resultType boolean). + :type sql_writer_use_table_lock: object + :param write_behavior: Write behavior when copying data into azure SQL DW. Type: + SqlDWWriteBehaviorEnum (or Expression with resultType SqlDWWriteBehaviorEnum). + :type write_behavior: object + :param upsert_settings: SQL DW upsert settings. + :type upsert_settings: ~azure.mgmt.datafactory.models.SqlDwUpsertSettings """ _validation = { @@ -36325,12 +37648,16 @@ class SqlDwSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, 'allow_poly_base': {'key': 'allowPolyBase', 'type': 'object'}, 'poly_base_settings': {'key': 'polyBaseSettings', 'type': 'PolybaseSettings'}, 'allow_copy_command': {'key': 'allowCopyCommand', 'type': 'object'}, 'copy_command_settings': {'key': 'copyCommandSettings', 'type': 'DwCopyCommandSettings'}, 'table_option': {'key': 'tableOption', 'type': 'object'}, + 'sql_writer_use_table_lock': {'key': 'sqlWriterUseTableLock', 'type': 'object'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, + 'upsert_settings': {'key': 'upsertSettings', 'type': 'SqlDwUpsertSettings'}, } def __init__( @@ -36342,15 +37669,19 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, pre_copy_script: Optional[object] = None, allow_poly_base: Optional[object] = None, poly_base_settings: Optional["PolybaseSettings"] = None, allow_copy_command: Optional[object] = None, copy_command_settings: Optional["DwCopyCommandSettings"] = None, table_option: Optional[object] = None, + sql_writer_use_table_lock: Optional[object] = None, + write_behavior: Optional[object] = None, + upsert_settings: Optional["SqlDwUpsertSettings"] = None, **kwargs ): - super(SqlDwSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(SqlDwSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, 
sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'SqlDWSink' # type: str self.pre_copy_script = pre_copy_script self.allow_poly_base = allow_poly_base @@ -36358,6 +37689,9 @@ def __init__( self.allow_copy_command = allow_copy_command self.copy_command_settings = copy_command_settings self.table_option = table_option + self.sql_writer_use_table_lock = sql_writer_use_table_lock + self.write_behavior = write_behavior + self.upsert_settings = upsert_settings class SqlDwSource(TabularSource): @@ -36379,12 +37713,15 @@ class SqlDwSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param sql_reader_query: SQL Data Warehouse reader query. Type: string (or Expression with resultType string). :type sql_reader_query: object @@ -36400,7 +37737,7 @@ class SqlDwSource(TabularSource): Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". :type partition_option: object :param partition_settings: The settings that will be leveraged for Sql source partitioning. 
- :type partition_settings: ~data_factory_management_client.models.SqlPartitionSettings + :type partition_settings: ~azure.mgmt.datafactory.models.SqlPartitionSettings """ _validation = { @@ -36413,8 +37750,9 @@ class SqlDwSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': 'object'}, @@ -36429,8 +37767,9 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, sql_reader_query: Optional[object] = None, sql_reader_stored_procedure_name: Optional[object] = None, stored_procedure_parameters: Optional[object] = None, @@ -36438,7 +37777,7 @@ def __init__( partition_settings: Optional["SqlPartitionSettings"] = None, **kwargs ): - super(SqlDwSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(SqlDwSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'SqlDWSource' # type: str self.sql_reader_query = sql_reader_query self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name @@ -36447,6 +37786,34 @@ def __init__( self.partition_settings = partition_settings +class SqlDwUpsertSettings(msrest.serialization.Model): + """Sql DW upsert option settings. + + :param interim_schema_name: Schema name for interim table. Type: string (or Expression with + resultType string). + :type interim_schema_name: object + :param keys: Key column names for unique row identification. Type: array of strings (or + Expression with resultType array of strings). + :type keys: object + """ + + _attribute_map = { + 'interim_schema_name': {'key': 'interimSchemaName', 'type': 'object'}, + 'keys': {'key': 'keys', 'type': 'object'}, + } + + def __init__( + self, + *, + interim_schema_name: Optional[object] = None, + keys: Optional[object] = None, + **kwargs + ): + super(SqlDwUpsertSettings, self).__init__(**kwargs) + self.interim_schema_name = interim_schema_name + self.keys = keys + + class SqlMiSink(CopySink): """A copy activity Azure SQL Managed Instance sink. 
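The hunks above introduce the new SqlDwUpsertSettings model and thread disable_metrics_collection, sql_writer_use_table_lock, write_behavior and upsert_settings through SqlDwSink. A minimal sketch of how these options compose follows; the azure.mgmt.datafactory.models import path (the namespace the updated docstrings reference) and the 'Upsert' write-behavior value are assumptions, not part of this diff.

# Sketch only, under the assumptions noted above; every keyword argument used
# here is one of the optional parameters added in the SqlDwSink and
# SqlDwUpsertSettings hunks.
from azure.mgmt.datafactory.models import SqlDwSink, SqlDwUpsertSettings

# Key columns identify matching rows; the interim schema hosts the staging table.
upsert = SqlDwUpsertSettings(
    interim_schema_name='staging',
    keys=['CustomerId'],
)

sink = SqlDwSink(
    write_behavior='Upsert',            # SqlDWWriteBehaviorEnum value (assumed)
    upsert_settings=upsert,
    sql_writer_use_table_lock=True,     # take a table lock during the bulk copy
    disable_metrics_collection=False,   # keep data store metrics collection on
)

Because write_behavior and the upsert fields are typed as object, each also accepts an Expression rather than a literal, matching the rest of the generated surface.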
@@ -36472,6 +37839,9 @@ class SqlMiSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or Expression with resultType string). :type sql_writer_stored_procedure_name: object @@ -36483,13 +37853,21 @@ :type pre_copy_script: object :param stored_procedure_parameters: SQL stored procedure parameters. :type stored_procedure_parameters: dict[str, - ~data_factory_management_client.models.StoredProcedureParameter] + ~azure.mgmt.datafactory.models.StoredProcedureParameter] :param stored_procedure_table_type_parameter_name: The stored procedure parameter name of the table type. Type: string (or Expression with resultType string). :type stored_procedure_table_type_parameter_name: object :param table_option: The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string (or Expression with resultType string). :type table_option: object + :param sql_writer_use_table_lock: Whether to use table lock during bulk copy. Type: boolean (or + Expression with resultType boolean). + :type sql_writer_use_table_lock: object + :param write_behavior: Write behavior when copying data into azure SQL MI. Type: + SqlWriteBehaviorEnum (or Expression with resultType SqlWriteBehaviorEnum). + :type write_behavior: object + :param upsert_settings: SQL upsert settings.
+ :type upsert_settings: ~azure.mgmt.datafactory.models.SqlUpsertSettings """ _validation = { @@ -36504,12 +37882,16 @@ class SqlMiSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, 'table_option': {'key': 'tableOption', 'type': 'object'}, + 'sql_writer_use_table_lock': {'key': 'sqlWriterUseTableLock', 'type': 'object'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, + 'upsert_settings': {'key': 'upsertSettings', 'type': 'SqlUpsertSettings'}, } def __init__( @@ -36521,15 +37903,19 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, sql_writer_stored_procedure_name: Optional[object] = None, sql_writer_table_type: Optional[object] = None, pre_copy_script: Optional[object] = None, stored_procedure_parameters: Optional[Dict[str, "StoredProcedureParameter"]] = None, stored_procedure_table_type_parameter_name: Optional[object] = None, table_option: Optional[object] = None, + sql_writer_use_table_lock: Optional[object] = None, + write_behavior: Optional[object] = None, + upsert_settings: Optional["SqlUpsertSettings"] = None, **kwargs ): - super(SqlMiSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(SqlMiSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'SqlMISink' # type: str self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name self.sql_writer_table_type = sql_writer_table_type @@ -36537,6 +37923,9 @@ def __init__( self.stored_procedure_parameters = stored_procedure_parameters self.stored_procedure_table_type_parameter_name = stored_procedure_table_type_parameter_name self.table_option = table_option + self.sql_writer_use_table_lock = sql_writer_use_table_lock + self.write_behavior = write_behavior + self.upsert_settings = upsert_settings class SqlMiSource(TabularSource): @@ -36558,12 +37947,15 @@ class SqlMiSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). 
+ :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). :type sql_reader_query: object :param sql_reader_stored_procedure_name: Name of the stored procedure for a Azure SQL Managed @@ -36573,14 +37965,14 @@ class SqlMiSource(TabularSource): :param stored_procedure_parameters: Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". :type stored_procedure_parameters: dict[str, - ~data_factory_management_client.models.StoredProcedureParameter] + ~azure.mgmt.datafactory.models.StoredProcedureParameter] :param produce_additional_types: Which additional types to produce. :type produce_additional_types: object :param partition_option: The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". :type partition_option: object :param partition_settings: The settings that will be leveraged for Sql source partitioning. - :type partition_settings: ~data_factory_management_client.models.SqlPartitionSettings + :type partition_settings: ~azure.mgmt.datafactory.models.SqlPartitionSettings """ _validation = { @@ -36593,8 +37985,9 @@ class SqlMiSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, @@ -36610,8 +38003,9 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, sql_reader_query: Optional[object] = None, sql_reader_stored_procedure_name: Optional[object] = None, stored_procedure_parameters: Optional[Dict[str, "StoredProcedureParameter"]] = None, @@ -36620,7 +38014,7 @@ def __init__( partition_settings: Optional["SqlPartitionSettings"] = None, **kwargs ): - super(SqlMiSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, 
query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(SqlMiSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'SqlMISource' # type: str self.sql_reader_query = sql_reader_query self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name @@ -36680,11 +38074,11 @@ class SqlServerLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param connection_string: Required. The connection string. Type: string, SecureString or @@ -36694,14 +38088,13 @@ class SqlServerLinkedService(LinkedService): with resultType string). :type user_name: object :param password: The on-premises Windows authentication password. - :type password: ~data_factory_management_client.models.SecretBase + :type password: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). :type encrypted_credential: object :param always_encrypted_settings: Sql always encrypted properties. - :type always_encrypted_settings: - ~data_factory_management_client.models.SqlAlwaysEncryptedProperties + :type always_encrypted_settings: ~azure.mgmt.datafactory.models.SqlAlwaysEncryptedProperties """ _validation = { @@ -36772,6 +38165,9 @@ class SqlServerSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or Expression with resultType string). :type sql_writer_stored_procedure_name: object @@ -36783,13 +38179,21 @@ class SqlServerSink(CopySink): :type pre_copy_script: object :param stored_procedure_parameters: SQL stored procedure parameters. :type stored_procedure_parameters: dict[str, - ~data_factory_management_client.models.StoredProcedureParameter] + ~azure.mgmt.datafactory.models.StoredProcedureParameter] :param stored_procedure_table_type_parameter_name: The stored procedure parameter name of the table type. Type: string (or Expression with resultType string). :type stored_procedure_table_type_parameter_name: object :param table_option: The option to handle sink table, such as autoCreate. 
For now only 'autoCreate' value is supported. Type: string (or Expression with resultType string). :type table_option: object + :param sql_writer_use_table_lock: Whether to use table lock during bulk copy. Type: boolean (or + Expression with resultType boolean). + :type sql_writer_use_table_lock: object + :param write_behavior: Write behavior when copying data into sql server. Type: + SqlWriteBehaviorEnum (or Expression with resultType SqlWriteBehaviorEnum). + :type write_behavior: object + :param upsert_settings: SQL upsert settings. + :type upsert_settings: ~azure.mgmt.datafactory.models.SqlUpsertSettings """ _validation = { @@ -36804,12 +38208,16 @@ class SqlServerSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, 'table_option': {'key': 'tableOption', 'type': 'object'}, + 'sql_writer_use_table_lock': {'key': 'sqlWriterUseTableLock', 'type': 'object'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, + 'upsert_settings': {'key': 'upsertSettings', 'type': 'SqlUpsertSettings'}, } def __init__( @@ -36821,15 +38229,19 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, sql_writer_stored_procedure_name: Optional[object] = None, sql_writer_table_type: Optional[object] = None, pre_copy_script: Optional[object] = None, stored_procedure_parameters: Optional[Dict[str, "StoredProcedureParameter"]] = None, stored_procedure_table_type_parameter_name: Optional[object] = None, table_option: Optional[object] = None, + sql_writer_use_table_lock: Optional[object] = None, + write_behavior: Optional[object] = None, + upsert_settings: Optional["SqlUpsertSettings"] = None, **kwargs ): - super(SqlServerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(SqlServerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'SqlServerSink' # type: str self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name self.sql_writer_table_type = sql_writer_table_type @@ -36837,6 +38249,9 @@ def __init__( self.stored_procedure_parameters = stored_procedure_parameters self.stored_procedure_table_type_parameter_name = stored_procedure_table_type_parameter_name self.table_option = table_option + self.sql_writer_use_table_lock = 
sql_writer_use_table_lock + self.write_behavior = write_behavior + self.upsert_settings = upsert_settings class SqlServerSource(TabularSource): @@ -36858,12 +38273,15 @@ class SqlServerSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). :type sql_reader_query: object :param sql_reader_stored_procedure_name: Name of the stored procedure for a SQL Database @@ -36873,14 +38291,14 @@ class SqlServerSource(TabularSource): :param stored_procedure_parameters: Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". :type stored_procedure_parameters: dict[str, - ~data_factory_management_client.models.StoredProcedureParameter] + ~azure.mgmt.datafactory.models.StoredProcedureParameter] :param produce_additional_types: Which additional types to produce. :type produce_additional_types: object :param partition_option: The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". :type partition_option: object :param partition_settings: The settings that will be leveraged for Sql source partitioning. 
- :type partition_settings: ~data_factory_management_client.models.SqlPartitionSettings + :type partition_settings: ~azure.mgmt.datafactory.models.SqlPartitionSettings """ _validation = { @@ -36893,8 +38311,9 @@ class SqlServerSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, @@ -36910,8 +38329,9 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, sql_reader_query: Optional[object] = None, sql_reader_stored_procedure_name: Optional[object] = None, stored_procedure_parameters: Optional[Dict[str, "StoredProcedureParameter"]] = None, @@ -36920,7 +38340,7 @@ def __init__( partition_settings: Optional["SqlPartitionSettings"] = None, **kwargs ): - super(SqlServerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(SqlServerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'SqlServerSource' # type: str self.sql_reader_query = sql_reader_query self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name @@ -36945,20 +38365,20 @@ class SqlServerStoredProcedureActivity(ExecutionActivity): :param description: Activity description. :type description: str :param depends_on: Activity depends on condition. - :type depends_on: list[~data_factory_management_client.models.ActivityDependency] + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. - :type user_properties: list[~data_factory_management_client.models.UserProperty] + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] :param linked_service_name: Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param policy: Activity policy. - :type policy: ~data_factory_management_client.models.ActivityPolicy + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy :param stored_procedure_name: Required. Stored procedure name. 
@@ -36945,20 +38365,20 @@ class SqlServerStoredProcedureActivity(ExecutionActivity):
     :param description: Activity description.
     :type description: str
     :param depends_on: Activity depends on condition.
-    :type depends_on: list[~data_factory_management_client.models.ActivityDependency]
+    :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
     :param user_properties: Activity user properties.
-    :type user_properties: list[~data_factory_management_client.models.UserProperty]
+    :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
     :param linked_service_name: Linked service reference.
-    :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference
+    :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference
     :param policy: Activity policy.
-    :type policy: ~data_factory_management_client.models.ActivityPolicy
+    :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy
     :param stored_procedure_name: Required. Stored procedure name. Type: string (or Expression
      with resultType string).
     :type stored_procedure_name: object
     :param stored_procedure_parameters: Value and type setting for stored procedure parameters.
      Example: "{Parameter1: {value: "1", type: "int"}}".
     :type stored_procedure_parameters: dict[str,
-     ~data_factory_management_client.models.StoredProcedureParameter]
+     ~azure.mgmt.datafactory.models.StoredProcedureParameter]
     """

     _validation = {
@@ -37019,14 +38439,14 @@ class SqlServerTableDataset(Dataset):
      Expression with resultType array), itemType: DatasetSchemaDataElement.
     :type schema: object
     :param linked_service_name: Required. Linked service reference.
-    :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference
+    :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference
     :param parameters: Parameters for dataset.
-    :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification]
+    :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification]
     :param annotations: List of tags that can be used for describing the Dataset.
     :type annotations: list[object]
     :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at
      the root level.
-    :type folder: ~data_factory_management_client.models.DatasetFolder
+    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
     :param table_name: This property will be retired. Please consider using schema + table
      properties instead.
     :type table_name: object
@@ -37106,6 +38526,9 @@ class SqlSink(CopySink):
     :param max_concurrent_connections: The maximum concurrent connection count for the sink data
      store. Type: integer (or Expression with resultType integer).
     :type max_concurrent_connections: object
+    :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+     false. Type: boolean (or Expression with resultType boolean).
+    :type disable_metrics_collection: object
     :param sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or
      Expression with resultType string).
     :type sql_writer_stored_procedure_name: object
@@ -37117,13 +38540,21 @@
     :type pre_copy_script: object
     :param stored_procedure_parameters: SQL stored procedure parameters.
     :type stored_procedure_parameters: dict[str,
-     ~data_factory_management_client.models.StoredProcedureParameter]
+     ~azure.mgmt.datafactory.models.StoredProcedureParameter]
     :param stored_procedure_table_type_parameter_name: The stored procedure parameter name of the
      table type. Type: string (or Expression with resultType string).
     :type stored_procedure_table_type_parameter_name: object
     :param table_option: The option to handle sink table, such as autoCreate. For now only
      'autoCreate' value is supported. Type: string (or Expression with resultType string).
     :type table_option: object
+    :param sql_writer_use_table_lock: Whether to use table lock during bulk copy. Type: boolean (or
+     Expression with resultType boolean).
+    :type sql_writer_use_table_lock: object
+    :param write_behavior: Write behavior when copying data into sql. Type: SqlWriteBehaviorEnum
+     (or Expression with resultType SqlWriteBehaviorEnum).
+    :type write_behavior: object
+    :param upsert_settings: SQL upsert settings.
+    :type upsert_settings: ~azure.mgmt.datafactory.models.SqlUpsertSettings
     """

     _validation = {
@@ -37138,12 +38569,16 @@ class SqlSink(CopySink):
         'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
         'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
         'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'},
         'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'},
         'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
         'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'},
         'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'},
         'table_option': {'key': 'tableOption', 'type': 'object'},
+        'sql_writer_use_table_lock': {'key': 'sqlWriterUseTableLock', 'type': 'object'},
+        'write_behavior': {'key': 'writeBehavior', 'type': 'object'},
+        'upsert_settings': {'key': 'upsertSettings', 'type': 'SqlUpsertSettings'},
     }

     def __init__(
@@ -37155,15 +38590,19 @@ def __init__(
         sink_retry_count: Optional[object] = None,
         sink_retry_wait: Optional[object] = None,
         max_concurrent_connections: Optional[object] = None,
+        disable_metrics_collection: Optional[object] = None,
         sql_writer_stored_procedure_name: Optional[object] = None,
         sql_writer_table_type: Optional[object] = None,
         pre_copy_script: Optional[object] = None,
         stored_procedure_parameters: Optional[Dict[str, "StoredProcedureParameter"]] = None,
         stored_procedure_table_type_parameter_name: Optional[object] = None,
         table_option: Optional[object] = None,
+        sql_writer_use_table_lock: Optional[object] = None,
+        write_behavior: Optional[object] = None,
+        upsert_settings: Optional["SqlUpsertSettings"] = None,
         **kwargs
     ):
-        super(SqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+        super(SqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
         self.type = 'SqlSink'  # type: str
         self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name
         self.sql_writer_table_type = sql_writer_table_type
@@ -37171,6 +38610,9 @@ def __init__(
         self.stored_procedure_parameters = stored_procedure_parameters
         self.stored_procedure_table_type_parameter_name = stored_procedure_table_type_parameter_name
         self.table_option = table_option
+        self.sql_writer_use_table_lock = sql_writer_use_table_lock
+        self.write_behavior = write_behavior
+        self.upsert_settings = upsert_settings


 class SqlSource(TabularSource):
@@ -37192,12 +38634,15 @@ class SqlSource(TabularSource):
     :param max_concurrent_connections: The maximum concurrent connection count for the source data
      store. Type: integer (or Expression with resultType integer).
+    :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+     false. Type: boolean (or Expression with resultType boolean).
+    :type disable_metrics_collection: object
     :param query_timeout: Query timeout. Type: string (or Expression with resultType string),
      pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
     :type query_timeout: object
     :param additional_columns: Specifies the additional columns to be added to source data. Type:
-     array of objects (or Expression with resultType array of objects).
-    :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+     array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+    :type additional_columns: object
     :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType
      string).
     :type sql_reader_query: object
     :param sql_reader_stored_procedure_name: Name of the stored procedure for a SQL Database
@@ -37207,7 +38652,7 @@ class SqlSource(TabularSource):
     :param stored_procedure_parameters: Value and type setting for stored procedure parameters.
      Example: "{Parameter1: {value: "1", type: "int"}}".
     :type stored_procedure_parameters: dict[str,
-     ~data_factory_management_client.models.StoredProcedureParameter]
+     ~azure.mgmt.datafactory.models.StoredProcedureParameter]
     :param isolation_level: Specifies the transaction locking behavior for the SQL source. Allowed
      values: ReadCommitted/ReadUncommitted/RepeatableRead/Serializable/Snapshot. The default value
      is ReadCommitted. Type: string (or Expression with resultType string).
@@ -37216,7 +38661,7 @@ class SqlSource(TabularSource):
      Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange".
     :type partition_option: object
     :param partition_settings: The settings that will be leveraged for Sql source partitioning.
-    :type partition_settings: ~data_factory_management_client.models.SqlPartitionSettings
+    :type partition_settings: ~azure.mgmt.datafactory.models.SqlPartitionSettings
     """

     _validation = {
@@ -37229,8 +38674,9 @@ class SqlSource(TabularSource):
         'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
         'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
         'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
-        'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+        'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
         'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'},
         'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'},
         'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'},
@@ -37246,8 +38692,9 @@ def __init__(
         self,
         *,
         additional_properties: Optional[Dict[str, object]] = None,
         source_retry_count: Optional[object] = None,
         source_retry_wait: Optional[object] = None,
         max_concurrent_connections: Optional[object] = None,
+        disable_metrics_collection: Optional[object] = None,
         query_timeout: Optional[object] = None,
-        additional_columns: Optional[List["AdditionalColumns"]] = None,
+        additional_columns: Optional[object] = None,
         sql_reader_query: Optional[object] = None,
         sql_reader_stored_procedure_name: Optional[object] = None,
         stored_procedure_parameters: Optional[Dict[str, "StoredProcedureParameter"]] = None,
@@ -37256,7 +38703,7 @@ def __init__(
         partition_settings: Optional["SqlPartitionSettings"] = None,
         **kwargs
     ):
-        super(SqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+        super(SqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
         self.type = 'SqlSource'  # type: str
         self.sql_reader_query = sql_reader_query
         self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name
@@ -37266,6 +38713,40 @@ def __init__(
         self.partition_settings = partition_settings


+class SqlUpsertSettings(msrest.serialization.Model):
+    """Sql upsert option settings.
+
+    :param use_temp_db: Specifies whether to use temp db for upsert interim table. Type: boolean
+     (or Expression with resultType boolean).
+    :type use_temp_db: object
+    :param interim_schema_name: Schema name for interim table. Type: string (or Expression with
+     resultType string).
+    :type interim_schema_name: object
+    :param keys: Key column names for unique row identification. Type: array of strings (or
+     Expression with resultType array of strings).
+    :type keys: object
+    """
+
+    _attribute_map = {
+        'use_temp_db': {'key': 'useTempDB', 'type': 'object'},
+        'interim_schema_name': {'key': 'interimSchemaName', 'type': 'object'},
+        'keys': {'key': 'keys', 'type': 'object'},
+    }
+
+    def __init__(
+        self,
+        *,
+        use_temp_db: Optional[object] = None,
+        interim_schema_name: Optional[object] = None,
+        keys: Optional[object] = None,
+        **kwargs
+    ):
+        super(SqlUpsertSettings, self).__init__(**kwargs)
+        self.use_temp_db = use_temp_db
+        self.interim_schema_name = interim_schema_name
+        self.keys = keys
+
+
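The new SqlUpsertSettings model above pairs with the write_behavior and upsert_settings parameters added to SqlSink. A minimal sketch of an upsert-configured sink, assuming the same import path the updated docstrings reference; the exact SqlWriteBehaviorEnum literal is not shown in this diff, so "Upsert" below is an assumption:

from azure.mgmt.datafactory.models import SqlSink, SqlUpsertSettings

sink = SqlSink(
    write_behavior="Upsert",  # assumed enum literal; the field travels as a raw object
    upsert_settings=SqlUpsertSettings(
        use_temp_db=True,      # stage rows in an interim temp-db table before merging
        keys=["CustomerId"],   # key column(s) that identify a unique row
    ),
    sql_writer_use_table_lock=False,  # the new bulk-copy table lock switch
)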
 class SquareLinkedService(LinkedService):
     """Square Service linked service.
@@ -37277,11 +38758,11 @@ class SquareLinkedService(LinkedService):
     :param type: Required. Type of linked service.Constant filled by server.
     :type type: str
     :param connect_via: The integration runtime reference.
-    :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference
+    :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
     :param description: Linked service description.
     :type description: str
     :param parameters: Parameters for linked service.
-    :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification]
+    :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification]
     :param annotations: List of tags that can be used for describing the linked service.
     :type annotations: list[object]
     :param connection_properties: Properties used to connect to Square. It is mutually exclusive
@@ -37292,7 +38773,7 @@ class SquareLinkedService(LinkedService):
     :param client_id: The client ID associated with your Square application.
     :type client_id: object
     :param client_secret: The client secret associated with your Square application.
-    :type client_secret: ~data_factory_management_client.models.SecretBase
+    :type client_secret: ~azure.mgmt.datafactory.models.SecretBase
     :param redirect_uri: The redirect URL assigned in the Square application dashboard. (i.e.
      http://localhost:2500).
     :type redirect_uri: object
@@ -37385,14 +38866,14 @@ class SquareObjectDataset(Dataset):
      Expression with resultType array), itemType: DatasetSchemaDataElement.
     :type schema: object
     :param linked_service_name: Required. Linked service reference.
-    :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference
+    :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference
     :param parameters: Parameters for dataset.
-    :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification]
+    :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification]
     :param annotations: List of tags that can be used for describing the Dataset.
     :type annotations: list[object]
     :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at
      the root level.
-    :type folder: ~data_factory_management_client.models.DatasetFolder
+    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
     :param table_name: The table name. Type: string (or Expression with resultType string).
     :type table_name: object
     """
@@ -37453,12 +38934,15 @@ class SquareSource(TabularSource):
     :param max_concurrent_connections: The maximum concurrent connection count for the source data
      store. Type: integer (or Expression with resultType integer).
     :type max_concurrent_connections: object
+    :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+     false. Type: boolean (or Expression with resultType boolean).
+    :type disable_metrics_collection: object
     :param query_timeout: Query timeout. Type: string (or Expression with resultType string),
      pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
     :type query_timeout: object
     :param additional_columns: Specifies the additional columns to be added to source data. Type:
-     array of objects (or Expression with resultType array of objects).
-    :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+     array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+    :type additional_columns: object
     :param query: A query to retrieve data from source. Type: string (or Expression with
      resultType string).
     :type query: object
@@ -37474,8 +38958,9 @@ class SquareSource(TabularSource):
         'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
         'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
         'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
-        'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+        'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
         'query': {'key': 'query', 'type': 'object'},
     }

@@ -37486,12 +38971,13 @@ def __init__(
         self,
         *,
         additional_properties: Optional[Dict[str, object]] = None,
         source_retry_count: Optional[object] = None,
         source_retry_wait: Optional[object] = None,
         max_concurrent_connections: Optional[object] = None,
+        disable_metrics_collection: Optional[object] = None,
         query_timeout: Optional[object] = None,
-        additional_columns: Optional[List["AdditionalColumns"]] = None,
+        additional_columns: Optional[object] = None,
         query: Optional[object] = None,
         **kwargs
     ):
-        super(SquareSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+        super(SquareSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
         self.type = 'SquareSource'  # type: str
         self.query = query

@@ -37506,7 +38992,7 @@ class SsisAccessCredential(msrest.serialization.Model):
     :param user_name: Required. UseName for windows authentication.
     :type user_name: object
     :param password: Required. Password for windows authentication.
-    :type password: ~data_factory_management_client.models.SecretBase
+    :type password: ~azure.mgmt.datafactory.models.SecretBase
     """

     _validation = {
@@ -37590,7 +39076,7 @@ class SsisObjectMetadata(msrest.serialization.Model):
     :param type: Required. Type of metadata.Constant filled by server. Possible values include:
      "Folder", "Project", "Package", "Environment".
-    :type type: str or ~data_factory_management_client.models.SsisObjectMetadataType
+    :type type: str or ~azure.mgmt.datafactory.models.SsisObjectMetadataType
     :param id: Metadata id.
     :type id: long
     :param name: Metadata name.
@@ -37636,7 +39122,7 @@ class SsisEnvironment(SsisObjectMetadata):
     :param type: Required. Type of metadata.Constant filled by server. Possible values include:
      "Folder", "Project", "Package", "Environment".
-    :type type: str or ~data_factory_management_client.models.SsisObjectMetadataType
+    :type type: str or ~azure.mgmt.datafactory.models.SsisObjectMetadataType
     :param id: Metadata id.
     :type id: long
     :param name: Metadata name.
@@ -37646,7 +39132,7 @@ class SsisEnvironment(SsisObjectMetadata):
     :param folder_id: Folder id which contains environment.
     :type folder_id: long
     :param variables: Variable in environment.
-    :type variables: list[~data_factory_management_client.models.SsisVariable]
+    :type variables: list[~azure.mgmt.datafactory.models.SsisVariable]
     """

     _validation = {
@@ -37724,7 +39210,7 @@ class SsisExecutionCredential(msrest.serialization.Model):
     :param user_name: Required. UseName for windows authentication.
     :type user_name: object
     :param password: Required. Password for windows authentication.
-    :type password: ~data_factory_management_client.models.SecureString
+    :type password: ~azure.mgmt.datafactory.models.SecureString
     """

     _validation = {
@@ -37788,7 +39274,7 @@ class SsisFolder(SsisObjectMetadata):
     :param type: Required. Type of metadata.Constant filled by server. Possible values include:
      "Folder", "Project", "Package", "Environment".
-    :type type: str or ~data_factory_management_client.models.SsisObjectMetadataType
+    :type type: str or ~azure.mgmt.datafactory.models.SsisObjectMetadataType
     :param id: Metadata id.
     :type id: long
     :param name: Metadata name.
@@ -37829,9 +39315,9 @@ class SsisLogLocation(msrest.serialization.Model):
      with resultType string).
     :type log_path: object
     :param type: Required. The type of SSIS log location. Possible values include: "File".
-    :type type: str or ~data_factory_management_client.models.SsisLogLocationType
+    :type type: str or ~azure.mgmt.datafactory.models.SsisLogLocationType
     :param access_credential: The package execution log access credential.
-    :type access_credential: ~data_factory_management_client.models.SsisAccessCredential
+    :type access_credential: ~azure.mgmt.datafactory.models.SsisAccessCredential
     :param log_refresh_interval: Specifies the interval to refresh log. The default interval is 5
      minutes. Type: string (or Expression with resultType string), pattern:
      ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
@@ -37870,7 +39356,7 @@ class SsisObjectMetadataListResponse(msrest.serialization.Model):
     """A list of SSIS object metadata.

     :param value: List of SSIS object metadata.
-    :type value: list[~data_factory_management_client.models.SsisObjectMetadata]
+    :type value: list[~azure.mgmt.datafactory.models.SsisObjectMetadata]
     :param next_link: The link to the next page of results, if any remaining results exist.
     :type next_link: str
     """
@@ -37935,7 +39421,7 @@ class SsisPackage(SsisObjectMetadata):
     :param type: Required. Type of metadata.Constant filled by server. Possible values include:
      "Folder", "Project", "Package", "Environment".
-    :type type: str or ~data_factory_management_client.models.SsisObjectMetadataType
+    :type type: str or ~azure.mgmt.datafactory.models.SsisObjectMetadataType
     :param id: Metadata id.
     :type id: long
     :param name: Metadata name.
@@ -37949,7 +39435,7 @@ class SsisPackage(SsisObjectMetadata):
     :param project_id: Project id which contains package.
     :type project_id: long
     :param parameters: Parameters in package.
-    :type parameters: list[~data_factory_management_client.models.SsisParameter]
+    :type parameters: list[~azure.mgmt.datafactory.models.SsisParameter]
     """

     _validation = {
@@ -37995,17 +39481,16 @@ class SsisPackageLocation(msrest.serialization.Model):
     :type package_path: object
     :param type: The type of SSIS package location. Possible values include: "SSISDB", "File",
      "InlinePackage", "PackageStore".
-    :type type: str or ~data_factory_management_client.models.SsisPackageLocationType
+    :type type: str or ~azure.mgmt.datafactory.models.SsisPackageLocationType
     :param package_password: Password of the package.
-    :type package_password: ~data_factory_management_client.models.SecretBase
+    :type package_password: ~azure.mgmt.datafactory.models.SecretBase
     :param access_credential: The package access credential.
-    :type access_credential: ~data_factory_management_client.models.SsisAccessCredential
+    :type access_credential: ~azure.mgmt.datafactory.models.SsisAccessCredential
     :param configuration_path: The configuration file of the package execution. Type: string (or
      Expression with resultType string).
     :type configuration_path: object
     :param configuration_access_credential: The configuration file access credential.
-    :type configuration_access_credential:
-     ~data_factory_management_client.models.SsisAccessCredential
+    :type configuration_access_credential: ~azure.mgmt.datafactory.models.SsisAccessCredential
     :param package_name: The package name.
     :type package_name: str
     :param package_content: The embedded package content. Type: string (or Expression with
@@ -38014,7 +39499,7 @@ class SsisPackageLocation(msrest.serialization.Model):
     :param package_last_modified_date: The embedded package last modified date.
     :type package_last_modified_date: str
     :param child_packages: The embedded child package list.
-    :type child_packages: list[~data_factory_management_client.models.SsisChildPackage]
+    :type child_packages: list[~azure.mgmt.datafactory.models.SsisChildPackage]
     """

     _attribute_map = {
@@ -38141,7 +39626,7 @@ class SsisProject(SsisObjectMetadata):
     :param type: Required. Type of metadata.Constant filled by server. Possible values include:
      "Folder", "Project", "Package", "Environment".
-    :type type: str or ~data_factory_management_client.models.SsisObjectMetadataType
+    :type type: str or ~azure.mgmt.datafactory.models.SsisObjectMetadataType
     :param id: Metadata id.
     :type id: long
     :param name: Metadata name.
@@ -38153,9 +39638,9 @@ class SsisProject(SsisObjectMetadata):
     :param version: Project version.
     :type version: long
     :param environment_refs: Environment reference in project.
-    :type environment_refs: list[~data_factory_management_client.models.SsisEnvironmentReference]
+    :type environment_refs: list[~azure.mgmt.datafactory.models.SsisEnvironmentReference]
     :param parameters: Parameters in project.
-    :type parameters: list[~data_factory_management_client.models.SsisParameter]
+    :type parameters: list[~azure.mgmt.datafactory.models.SsisParameter]
     """

     _validation = {
@@ -38287,7 +39772,7 @@ class StagingSettings(msrest.serialization.Model):
      collection.
     :type additional_properties: dict[str, object]
     :param linked_service_name: Required. Staging linked service reference.
-    :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference
+    :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference
     :param path: The path to storage for storing the interim data. Type: string (or Expression
      with resultType string).
     :type path: object
@@ -38331,7 +39816,7 @@ class StoredProcedureParameter(msrest.serialization.Model):
     :type value: object
     :param type: Stored procedure parameter type. Possible values include: "String", "Int",
      "Int64", "Decimal", "Guid", "Boolean", "Date".
-    :type type: str or ~data_factory_management_client.models.StoredProcedureParameterType
+    :type type: str or ~azure.mgmt.datafactory.models.StoredProcedureParameterType
     """

     _attribute_map = {
@@ -38366,19 +39851,19 @@ class SwitchActivity(Activity):
     :param description: Activity description.
     :type description: str
     :param depends_on: Activity depends on condition.
-    :type depends_on: list[~data_factory_management_client.models.ActivityDependency]
+    :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
     :param user_properties: Activity user properties.
-    :type user_properties: list[~data_factory_management_client.models.UserProperty]
+    :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
     :param on: Required. An expression that would evaluate to a string or integer. This is used to
      determine the block of activities in cases that will be executed.
-    :type on: ~data_factory_management_client.models.Expression
+    :type on: ~azure.mgmt.datafactory.models.Expression
     :param cases: List of cases that correspond to expected values of the 'on' property. This is
      an optional property and if not provided, the activity will execute activities provided in
      defaultActivities.
-    :type cases: list[~data_factory_management_client.models.SwitchCase]
+    :type cases: list[~azure.mgmt.datafactory.models.SwitchCase]
     :param default_activities: List of activities to execute if no case condition is satisfied.
      This is an optional property and if not provided, the activity will exit without any action.
-    :type default_activities: list[~data_factory_management_client.models.Activity]
+    :type default_activities: list[~azure.mgmt.datafactory.models.Activity]
     """

     _validation = {
@@ -38425,7 +39910,7 @@ class SwitchCase(msrest.serialization.Model):
     :param value: Expected value that satisfies the expression result of the 'on' property.
     :type value: str
     :param activities: List of activities to execute for satisfied case condition.
-    :type activities: list[~data_factory_management_client.models.Activity]
+    :type activities: list[~azure.mgmt.datafactory.models.Activity]
     """

     _attribute_map = {
@@ -38456,11 +39941,11 @@ class SybaseLinkedService(LinkedService):
     :param type: Required. Type of linked service.Constant filled by server.
     :type type: str
     :param connect_via: The integration runtime reference.
-    :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference
+    :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
     :param description: Linked service description.
     :type description: str
     :param parameters: Parameters for linked service.
-    :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification]
+    :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification]
     :param annotations: List of tags that can be used for describing the linked service.
     :type annotations: list[object]
     :param server: Required. Server name for connection. Type: string (or Expression with
@@ -38473,13 +39958,12 @@ class SybaseLinkedService(LinkedService):
     :type schema: object
     :param authentication_type: AuthenticationType to be used for connection. Possible values
      include: "Basic", "Windows".
-    :type authentication_type: str or
-     ~data_factory_management_client.models.SybaseAuthenticationType
+    :type authentication_type: str or ~azure.mgmt.datafactory.models.SybaseAuthenticationType
     :param username: Username for authentication. Type: string (or Expression with resultType
      string).
     :type username: object
     :param password: Password for authentication.
-    :type password: ~data_factory_management_client.models.SecretBase
+    :type password: ~azure.mgmt.datafactory.models.SecretBase
     :param encrypted_credential: The encrypted credential used for authentication. Credentials are
      encrypted using the integration runtime credential manager. Type: string (or Expression with
      resultType string).
@@ -38555,12 +40039,15 @@ class SybaseSource(TabularSource):
     :param max_concurrent_connections: The maximum concurrent connection count for the source data
      store. Type: integer (or Expression with resultType integer).
     :type max_concurrent_connections: object
+    :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+     false. Type: boolean (or Expression with resultType boolean).
+    :type disable_metrics_collection: object
     :param query_timeout: Query timeout. Type: string (or Expression with resultType string),
      pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
     :type query_timeout: object
     :param additional_columns: Specifies the additional columns to be added to source data. Type:
-     array of objects (or Expression with resultType array of objects).
-    :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+     array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+    :type additional_columns: object
     :param query: Database query. Type: string (or Expression with resultType string).
     :type query: object
     """
@@ -38575,8 +40062,9 @@ class SybaseSource(TabularSource):
         'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
         'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
         'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
-        'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+        'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
         'query': {'key': 'query', 'type': 'object'},
     }

@@ -38587,12 +40075,13 @@ def __init__(
         self,
         *,
         additional_properties: Optional[Dict[str, object]] = None,
         source_retry_count: Optional[object] = None,
         source_retry_wait: Optional[object] = None,
         max_concurrent_connections: Optional[object] = None,
+        disable_metrics_collection: Optional[object] = None,
         query_timeout: Optional[object] = None,
-        additional_columns: Optional[List["AdditionalColumns"]] = None,
+        additional_columns: Optional[object] = None,
         query: Optional[object] = None,
         **kwargs
     ):
-        super(SybaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+        super(SybaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
         self.type = 'SybaseSource'  # type: str
         self.query = query

@@ -38616,14 +40105,14 @@ class SybaseTableDataset(Dataset):
      Expression with resultType array), itemType: DatasetSchemaDataElement.
     :type schema: object
     :param linked_service_name: Required. Linked service reference.
-    :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference
+    :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference
     :param parameters: Parameters for dataset.
-    :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification]
+    :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification]
     :param annotations: List of tags that can be used for describing the Dataset.
     :type annotations: list[object]
     :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at
      the root level.
-    :type folder: ~data_factory_management_client.models.DatasetFolder
+    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
     :param table_name: The Sybase table name. Type: string (or Expression with resultType string).
     :type table_name: object
     """
@@ -38700,7 +40189,7 @@ class TabularTranslator(CopyTranslator):
      activity. Type: boolean (or Expression with resultType boolean).
     :type type_conversion: object
     :param type_conversion_settings: Type conversion settings.
-    :type type_conversion_settings: ~data_factory_management_client.models.TypeConversionSettings
+    :type type_conversion_settings: ~azure.mgmt.datafactory.models.TypeConversionSettings
     """

     _validation = {
@@ -38828,11 +40317,11 @@ class TeradataLinkedService(LinkedService):
     :param type: Required. Type of linked service.Constant filled by server.
     :type type: str
     :param connect_via: The integration runtime reference.
-    :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference
+    :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
     :param description: Linked service description.
     :type description: str
     :param parameters: Parameters for linked service.
-    :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification]
+    :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification]
     :param annotations: List of tags that can be used for describing the linked service.
     :type annotations: list[object]
     :param connection_string: Teradata ODBC connection string. Type: string, SecureString or
@@ -38842,13 +40331,12 @@ class TeradataLinkedService(LinkedService):
     :type server: object
     :param authentication_type: AuthenticationType to be used for connection. Possible values
      include: "Basic", "Windows".
-    :type authentication_type: str or
-     ~data_factory_management_client.models.TeradataAuthenticationType
+    :type authentication_type: str or ~azure.mgmt.datafactory.models.TeradataAuthenticationType
     :param username: Username for authentication. Type: string (or Expression with resultType
      string).
     :type username: object
     :param password: Password for authentication.
-    :type password: ~data_factory_management_client.models.SecretBase
+    :type password: ~azure.mgmt.datafactory.models.SecretBase
     :param encrypted_credential: The encrypted credential used for authentication. Credentials are
      encrypted using the integration runtime credential manager. Type: string (or Expression with
      resultType string).
@@ -38955,12 +40443,15 @@ class TeradataSource(TabularSource):
     :param max_concurrent_connections: The maximum concurrent connection count for the source data
      store. Type: integer (or Expression with resultType integer).
     :type max_concurrent_connections: object
+    :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+     false. Type: boolean (or Expression with resultType boolean).
+    :type disable_metrics_collection: object
     :param query_timeout: Query timeout. Type: string (or Expression with resultType string),
      pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
     :type query_timeout: object
     :param additional_columns: Specifies the additional columns to be added to source data. Type:
-     array of objects (or Expression with resultType array of objects).
-    :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+     array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+    :type additional_columns: object
     :param query: Teradata query. Type: string (or Expression with resultType string).
     :type query: object
     :param partition_option: The partition mechanism that will be used for teradata read in
@@ -38968,7 +40459,7 @@ class TeradataSource(TabularSource):
     :type partition_option: object
     :param partition_settings: The settings that will be leveraged for teradata source
      partitioning.
-    :type partition_settings: ~data_factory_management_client.models.TeradataPartitionSettings
+    :type partition_settings: ~azure.mgmt.datafactory.models.TeradataPartitionSettings
     """

     _validation = {
@@ -38981,8 +40472,9 @@ class TeradataSource(TabularSource):
         'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
         'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
         'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
-        'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+        'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
         'query': {'key': 'query', 'type': 'object'},
         'partition_option': {'key': 'partitionOption', 'type': 'object'},
         'partition_settings': {'key': 'partitionSettings', 'type': 'TeradataPartitionSettings'},
@@ -38995,14 +40487,15 @@ def __init__(
         self,
         *,
         additional_properties: Optional[Dict[str, object]] = None,
         source_retry_count: Optional[object] = None,
         source_retry_wait: Optional[object] = None,
         max_concurrent_connections: Optional[object] = None,
+        disable_metrics_collection: Optional[object] = None,
         query_timeout: Optional[object] = None,
-        additional_columns: Optional[List["AdditionalColumns"]] = None,
+        additional_columns: Optional[object] = None,
         query: Optional[object] = None,
         partition_option: Optional[object] = None,
         partition_settings: Optional["TeradataPartitionSettings"] = None,
         **kwargs
     ):
-        super(TeradataSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+        super(TeradataSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
         self.type = 'TeradataSource'  # type: str
         self.query = query
         self.partition_option = partition_option
@@ -39028,14 +40521,14 @@ class TeradataTableDataset(Dataset):
      Expression with resultType array), itemType: DatasetSchemaDataElement.
     :type schema: object
     :param linked_service_name: Required. Linked service reference.
-    :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference
+    :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference
     :param parameters: Parameters for dataset.
-    :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification]
+    :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification]
     :param annotations: List of tags that can be used for describing the Dataset.
     :type annotations: list[object]
     :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at
      the root level.
-    :type folder: ~data_factory_management_client.models.DatasetFolder
+    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
     :param database: The database name of Teradata. Type: string (or Expression with resultType
      string).
     :type database: object
@@ -39187,7 +40680,7 @@ class TriggerDependencyReference(DependencyReference):
     :param type: Required. The type of dependency reference.Constant filled by server.
     :type type: str
     :param reference_trigger: Required. Referenced trigger.
-    :type reference_trigger: ~data_factory_management_client.models.TriggerReference
+    :type reference_trigger: ~azure.mgmt.datafactory.models.TriggerReference
     """

     _validation = {
@@ -39249,7 +40742,7 @@ class TriggerListResponse(msrest.serialization.Model):
     All required parameters must be populated in order to send to Azure.

     :param value: Required. List of triggers.
-    :type value: list[~data_factory_management_client.models.TriggerResource]
+    :type value: list[~azure.mgmt.datafactory.models.TriggerResource]
     :param next_link: The link to the next page of results, if any remaining results exist.
     :type next_link: str
     """
@@ -39279,7 +40772,7 @@ class TriggerPipelineReference(msrest.serialization.Model):
     """Pipeline that needs to be triggered with the given parameters.

     :param pipeline_reference: Pipeline reference.
-    :type pipeline_reference: ~data_factory_management_client.models.PipelineReference
+    :type pipeline_reference: ~azure.mgmt.datafactory.models.PipelineReference
     :param parameters: Pipeline parameters.
     :type parameters: dict[str, object]
     """
@@ -39307,7 +40800,7 @@ class TriggerQueryResponse(msrest.serialization.Model):
     All required parameters must be populated in order to send to Azure.

     :param value: Required. List of triggers.
-    :type value: list[~data_factory_management_client.models.TriggerResource]
+    :type value: list[~azure.mgmt.datafactory.models.TriggerResource]
     :param continuation_token: The continuation token for getting the next page of results, if any
      remaining results exist, null otherwise.
     :type continuation_token: str
@@ -39385,7 +40878,7 @@ class TriggerResource(SubResource):
     :ivar etag: Etag identifies change in the resource.
     :vartype etag: str
     :param properties: Required. Properties of the trigger.
-    :type properties: ~data_factory_management_client.models.Trigger
+    :type properties: ~azure.mgmt.datafactory.models.Trigger
     """

     _validation = {
@@ -39431,7 +40924,7 @@ class TriggerRun(msrest.serialization.Model):
     :ivar trigger_run_timestamp: Trigger run start time.
     :vartype trigger_run_timestamp: ~datetime.datetime
     :ivar status: Trigger run status. Possible values include: "Succeeded", "Failed",
      "Inprogress".
-    :vartype status: str or ~data_factory_management_client.models.TriggerRunStatus
+    :vartype status: str or ~azure.mgmt.datafactory.models.TriggerRunStatus
     :ivar message: Trigger error message.
     :vartype message: str
     :ivar properties: List of property name and value related to trigger run. Name, value pair
@@ -39498,7 +40991,7 @@ class TriggerRunsQueryResponse(msrest.serialization.Model):
     All required parameters must be populated in order to send to Azure.

     :param value: Required. List of trigger runs.
-    :type value: list[~data_factory_management_client.models.TriggerRun]
+    :type value: list[~azure.mgmt.datafactory.models.TriggerRun]
     :param continuation_token: The continuation token for getting the next page of results, if any
      remaining results exist, null otherwise.
     :type continuation_token: str
@@ -39534,7 +41027,7 @@ class TriggerSubscriptionOperationStatus(msrest.serialization.Model):
     :vartype trigger_name: str
     :ivar status: Event Subscription Status. Possible values include: "Enabled", "Provisioning",
      "Deprovisioning", "Disabled", "Unknown".
-    :vartype status: str or ~data_factory_management_client.models.EventSubscriptionStatus
+    :vartype status: str or ~azure.mgmt.datafactory.models.EventSubscriptionStatus
     """

     _validation = {
@@ -39572,15 +41065,15 @@ class TumblingWindowTrigger(Trigger):
     :type description: str
     :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are
      called on the Trigger. Possible values include: "Started", "Stopped", "Disabled".
-    :vartype runtime_state: str or ~data_factory_management_client.models.TriggerRuntimeState
+    :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState
     :param annotations: List of tags that can be used for describing the trigger.
     :type annotations: list[object]
     :param pipeline: Required. Pipeline for which runs are created when an event is fired for
      trigger window that is ready.
-    :type pipeline: ~data_factory_management_client.models.TriggerPipelineReference
+    :type pipeline: ~azure.mgmt.datafactory.models.TriggerPipelineReference
     :param frequency: Required. The frequency of the time windows. Possible values include:
      "Minute", "Hour", "Month".
-    :type frequency: str or ~data_factory_management_client.models.TumblingWindowFrequency
+    :type frequency: str or ~azure.mgmt.datafactory.models.TumblingWindowFrequency
     :param interval: Required. The interval of the time windows. The minimum interval allowed is
      15 Minutes.
     :type interval: int
@@ -39598,10 +41091,10 @@ class TumblingWindowTrigger(Trigger):
      for which a new run is triggered.
     :type max_concurrency: int
     :param retry_policy: Retry policy that will be applied for failed pipeline runs.
-    :type retry_policy: ~data_factory_management_client.models.RetryPolicy
+    :type retry_policy: ~azure.mgmt.datafactory.models.RetryPolicy
     :param depends_on: Triggers that this trigger depends on. Only tumbling window triggers are
      supported.
-    :type depends_on: list[~data_factory_management_client.models.DependencyReference]
+    :type depends_on: list[~azure.mgmt.datafactory.models.DependencyReference]
     """

     _validation = {
@@ -39669,7 +41162,7 @@ class TumblingWindowTriggerDependencyReference(TriggerDependencyReference):
     :param type: Required. The type of dependency reference.Constant filled by server.
     :type type: str
     :param reference_trigger: Required. Referenced trigger.
-    :type reference_trigger: ~data_factory_management_client.models.TriggerReference
+    :type reference_trigger: ~azure.mgmt.datafactory.models.TriggerReference
     :param offset: Timespan applied to the start time of a tumbling window when evaluating
      dependency.
     :type offset: str
@@ -39773,12 +41266,12 @@ class UntilActivity(Activity):
     :param description: Activity description.
     :type description: str
     :param depends_on: Activity depends on condition.
-    :type depends_on: list[~data_factory_management_client.models.ActivityDependency]
+    :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
     :param user_properties: Activity user properties.
-    :type user_properties: list[~data_factory_management_client.models.UserProperty]
+    :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
     :param expression: Required. An expression that would evaluate to Boolean. The loop will
      continue until this expression evaluates to true.
-    :type expression: ~data_factory_management_client.models.Expression
+    :type expression: ~azure.mgmt.datafactory.models.Expression
     :param timeout: Specifies the timeout for the activity to run. If there is no value specified,
      it takes the value of TimeSpan.FromDays(7) which is 1 week as default. Type: string (or
      Expression with resultType string), pattern:
@@ -39786,7 +41279,7 @@ class UntilActivity(Activity):
      resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
     :type timeout: object
     :param activities: Required. List of activities to execute.
-    :type activities: list[~data_factory_management_client.models.Activity]
+    :type activities: list[~azure.mgmt.datafactory.models.Activity]
     """

     _validation = {
@@ -39860,7 +41353,7 @@ class UpdateIntegrationRuntimeRequest(msrest.serialization.Model):
     :param auto_update: Enables or disables the auto-update feature of the self-hosted integration
      runtime. See https://go.microsoft.com/fwlink/?linkid=854189. Possible values include: "On",
      "Off".
-    :type auto_update: str or ~data_factory_management_client.models.IntegrationRuntimeAutoUpdate
+    :type auto_update: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeAutoUpdate
     :param update_delay_offset: The time offset (in hours) in the day, e.g., PT03H is 3 hours. The
      integration runtime auto update will happen on that time.
     :type update_delay_offset: str
@@ -39977,9 +41470,9 @@ class ValidationActivity(Activity):
     :param description: Activity description.
     :type description: str
     :param depends_on: Activity depends on condition.
-    :type depends_on: list[~data_factory_management_client.models.ActivityDependency]
+    :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
     :param user_properties: Activity user properties.
-    :type user_properties: list[~data_factory_management_client.models.UserProperty]
+    :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
     :param timeout: Specifies the timeout for the activity to run. If there is no value specified,
      it takes the value of TimeSpan.FromDays(7) which is 1 week as default. Type: string (or
      Expression with resultType string), pattern:
@@ -39996,7 +41489,7 @@ class ValidationActivity(Activity):
      with resultType boolean).
     :type child_items: object
     :param dataset: Required. Validation activity dataset reference.
-    :type dataset: ~data_factory_management_client.models.DatasetReference
+    :type dataset: ~azure.mgmt.datafactory.models.DatasetReference
     """

     _validation = {
@@ -40049,7 +41542,7 @@ class VariableSpecification(msrest.serialization.Model):
     All required parameters must be populated in order to send to Azure.

     :param type: Required. Variable type. Possible values include: "String", "Bool", "Array".
-    :type type: str or ~data_factory_management_client.models.VariableType
+    :type type: str or ~azure.mgmt.datafactory.models.VariableType
     :param default_value: Default value of variable.
     :type default_value: object
     """
@@ -40086,18 +41579,18 @@ class VerticaLinkedService(LinkedService):
     :param type: Required. Type of linked service.Constant filled by server.
     :type type: str
     :param connect_via: The integration runtime reference.
-    :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference
+    :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
     :param description: Linked service description.
     :type description: str
     :param parameters: Parameters for linked service.
-    :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification]
+    :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification]
     :param annotations: List of tags that can be used for describing the linked service.
     :type annotations: list[object]
     :param connection_string: An ODBC connection string. Type: string, SecureString or
      AzureKeyVaultSecretReference.
     :type connection_string: object
     :param pwd: The Azure key vault secret reference of password in connection string.
-    :type pwd: ~data_factory_management_client.models.AzureKeyVaultSecretReference
+    :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference
     :param encrypted_credential: The encrypted credential used for authentication. Credentials are
      encrypted using the integration runtime credential manager. Type: string (or Expression with
      resultType string).
@@ -40159,12 +41652,15 @@ class VerticaSource(TabularSource):
     :param max_concurrent_connections: The maximum concurrent connection count for the source data
      store. Type: integer (or Expression with resultType integer).
     :type max_concurrent_connections: object
+    :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+     false. Type: boolean (or Expression with resultType boolean).
+    :type disable_metrics_collection: object
     :param query_timeout: Query timeout. Type: string (or Expression with resultType string),
      pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
     :type query_timeout: object
     :param additional_columns: Specifies the additional columns to be added to source data. Type:
-     array of objects (or Expression with resultType array of objects).
-    :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+     array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+    :type additional_columns: object
     :param query: A query to retrieve data from source. Type: string (or Expression with
      resultType string).
     :type query: object
@@ -40180,8 +41676,9 @@ class VerticaSource(TabularSource):
         'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
         'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
         'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
-        'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+        'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
         'query': {'key': 'query', 'type': 'object'},
     }

@@ -40192,12 +41689,13 @@ def __init__(
         self,
         *,
         additional_properties: Optional[Dict[str, object]] = None,
         source_retry_count: Optional[object] = None,
         source_retry_wait: Optional[object] = None,
         max_concurrent_connections: Optional[object] = None,
+        disable_metrics_collection: Optional[object] = None,
         query_timeout: Optional[object] = None,
-        additional_columns: Optional[List["AdditionalColumns"]] = None,
+        additional_columns: Optional[object] = None,
         query: Optional[object] = None,
         **kwargs
     ):
-        super(VerticaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+        super(VerticaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
         self.type = 'VerticaSource'  # type: str
         self.query = query

@@ -40221,14 +41719,14 @@ class VerticaTableDataset(Dataset):
      Expression with resultType array), itemType: DatasetSchemaDataElement.
     :type schema: object
     :param linked_service_name: Required. Linked service reference.
-    :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference
+    :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference
     :param parameters: Parameters for dataset.
-    :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification]
+    :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification]
     :param annotations: List of tags that can be used for describing the Dataset.
     :type annotations: list[object]
     :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at
      the root level.
-    :type folder: ~data_factory_management_client.models.DatasetFolder
+    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
     :param table_name: This property will be retired. Please consider using schema + table
      properties instead.
     :type table_name: object
@@ -40298,9 +41796,9 @@ class WaitActivity(Activity):
     :param description: Activity description.
     :type description: str
     :param depends_on: Activity depends on condition.
-    :type depends_on: list[~data_factory_management_client.models.ActivityDependency]
+    :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
     :param user_properties: Activity user properties.
-    :type user_properties: list[~data_factory_management_client.models.UserProperty]
+    :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
     :param wait_time_in_seconds: Required. Duration in seconds.
     :type wait_time_in_seconds: object
     """

     _validation = {
@@ -40352,16 +41850,16 @@ class WebActivity(ExecutionActivity):
     :param description: Activity description.
     :type description: str
     :param depends_on: Activity depends on condition.
-    :type depends_on: list[~data_factory_management_client.models.ActivityDependency]
+    :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
     :param user_properties: Activity user properties.
-    :type user_properties: list[~data_factory_management_client.models.UserProperty]
+    :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
     :param linked_service_name: Linked service reference.
-    :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference
+    :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference
     :param policy: Activity policy.
-    :type policy: ~data_factory_management_client.models.ActivityPolicy
+    :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy
     :param method: Required. Rest API method for target endpoint. Possible values include: "GET",
      "POST", "PUT", "DELETE".
-    :type method: str or ~data_factory_management_client.models.WebActivityMethod
+    :type method: str or ~azure.mgmt.datafactory.models.WebActivityMethod
     :param url: Required. Web activity target endpoint and path. Type: string (or Expression with
      resultType string).
     :type url: object
@@ -40373,13 +41871,13 @@ class WebActivity(ExecutionActivity):
      method, not allowed for GET method Type: string (or Expression with resultType string).
     :type body: object
     :param authentication: Authentication method used for calling the endpoint.
-    :type authentication: ~data_factory_management_client.models.WebActivityAuthentication
+    :type authentication: ~azure.mgmt.datafactory.models.WebActivityAuthentication
     :param datasets: List of datasets passed to web endpoint.
-    :type datasets: list[~data_factory_management_client.models.DatasetReference]
+    :type datasets: list[~azure.mgmt.datafactory.models.DatasetReference]
     :param linked_services: List of linked services passed to web endpoint.
-    :type linked_services: list[~data_factory_management_client.models.LinkedServiceReference]
+    :type linked_services: list[~azure.mgmt.datafactory.models.LinkedServiceReference]
     :param connect_via: The integration runtime reference.
-    :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference
+    :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
     """

     _validation = {
@@ -40443,32 +41941,27 @@ def __init__(

 class WebActivityAuthentication(msrest.serialization.Model):
     """Web activity authentication properties.

-    All required parameters must be populated in order to send to Azure.
-
-    :param type: Required. Web activity authentication
-     (Basic/ClientCertificate/MSI/ServicePrincipal).
+    :param type: Web activity authentication (Basic/ClientCertificate/MSI/ServicePrincipal).
     :type type: str
     :param pfx: Base64-encoded contents of a PFX file or Certificate when used for
      ServicePrincipal.
-    :type pfx: ~data_factory_management_client.models.SecretBase
+    :type pfx: ~azure.mgmt.datafactory.models.SecretBase
     :param username: Web activity authentication user name for basic authentication or ClientID
      when used for ServicePrincipal. Type: string (or Expression with resultType string).
     :type username: object
     :param password: Password for the PFX file or basic authentication / Secret when used for
      ServicePrincipal.
- :type password: ~data_factory_management_client.models.SecretBase + :type password: ~azure.mgmt.datafactory.models.SecretBase :param resource: Resource for which Azure Auth token will be requested when using MSI Authentication. Type: string (or Expression with resultType string). :type resource: object :param user_tenant: TenantId for which Azure Auth token will be requested when using ServicePrincipal Authentication. Type: string (or Expression with resultType string). :type user_tenant: object + :param credential: The credential reference containing authentication information. + :type credential: ~azure.mgmt.datafactory.models.CredentialReference """ - _validation = { - 'type': {'required': True}, - } - _attribute_map = { 'type': {'key': 'type', 'type': 'str'}, 'pfx': {'key': 'pfx', 'type': 'SecretBase'}, @@ -40476,17 +41969,19 @@ class WebActivityAuthentication(msrest.serialization.Model): 'password': {'key': 'password', 'type': 'SecretBase'}, 'resource': {'key': 'resource', 'type': 'object'}, 'user_tenant': {'key': 'userTenant', 'type': 'object'}, + 'credential': {'key': 'credential', 'type': 'CredentialReference'}, } def __init__( self, *, - type: str, + type: Optional[str] = None, pfx: Optional["SecretBase"] = None, username: Optional[object] = None, password: Optional["SecretBase"] = None, resource: Optional[object] = None, user_tenant: Optional[object] = None, + credential: Optional["CredentialReference"] = None, **kwargs ): super(WebActivityAuthentication, self).__init__(**kwargs) @@ -40496,6 +41991,7 @@ def __init__( self.password = password self.resource = resource self.user_tenant = user_tenant + self.credential = credential class WebLinkedServiceTypeProperties(msrest.serialization.Model): @@ -40512,7 +42008,7 @@ class WebLinkedServiceTypeProperties(msrest.serialization.Model): :param authentication_type: Required. Type of authentication used to connect to the web table source.Constant filled by server. Possible values include: "Basic", "Anonymous", "ClientCertificate". - :type authentication_type: str or ~data_factory_management_client.models.WebAuthenticationType + :type authentication_type: str or ~azure.mgmt.datafactory.models.WebAuthenticationType """ _validation = { @@ -40551,7 +42047,7 @@ class WebAnonymousAuthentication(WebLinkedServiceTypeProperties): :param authentication_type: Required. Type of authentication used to connect to the web table source.Constant filled by server. Possible values include: "Basic", "Anonymous", "ClientCertificate". - :type authentication_type: str or ~data_factory_management_client.models.WebAuthenticationType + :type authentication_type: str or ~azure.mgmt.datafactory.models.WebAuthenticationType """ _validation = { @@ -40585,12 +42081,12 @@ class WebBasicAuthentication(WebLinkedServiceTypeProperties): :param authentication_type: Required. Type of authentication used to connect to the web table source.Constant filled by server. Possible values include: "Basic", "Anonymous", "ClientCertificate". - :type authentication_type: str or ~data_factory_management_client.models.WebAuthenticationType + :type authentication_type: str or ~azure.mgmt.datafactory.models.WebAuthenticationType :param username: Required. User name for Basic authentication. Type: string (or Expression with resultType string). :type username: object :param password: Required. The password for Basic authentication. 
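The `WebActivityAuthentication` hunk above relaxes `type` to optional and adds a `credential` reference. A sketch of the new shape, assuming the `CredentialReference` constructor takes `type` and `reference_name`; the credential name is hypothetical:

```python
from azure.mgmt.datafactory import models

auth = models.WebActivityAuthentication(
    type="MSI",
    resource="https://management.azure.com/",
    # New in this patch: point at a factory credential instead of inlining
    # secrets. The reference name is an assumption for illustration.
    credential=models.CredentialReference(
        type="CredentialReference",
        reference_name="exampleUserAssignedIdentity",
    ),
)
```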
- :type password: ~data_factory_management_client.models.SecretBase + :type password: ~azure.mgmt.datafactory.models.SecretBase """ _validation = { @@ -40632,11 +42128,11 @@ class WebClientCertificateAuthentication(WebLinkedServiceTypeProperties): :param authentication_type: Required. Type of authentication used to connect to the web table source.Constant filled by server. Possible values include: "Basic", "Anonymous", "ClientCertificate". - :type authentication_type: str or ~data_factory_management_client.models.WebAuthenticationType + :type authentication_type: str or ~azure.mgmt.datafactory.models.WebAuthenticationType :param pfx: Required. Base64-encoded contents of a PFX file. - :type pfx: ~data_factory_management_client.models.SecretBase + :type pfx: ~azure.mgmt.datafactory.models.SecretBase :param password: Required. Password for the PFX file. - :type password: ~data_factory_management_client.models.SecretBase + :type password: ~azure.mgmt.datafactory.models.SecretBase """ _validation = { @@ -40682,11 +42178,11 @@ class WebHookActivity(Activity): :param description: Activity description. :type description: str :param depends_on: Activity depends on condition. - :type depends_on: list[~data_factory_management_client.models.ActivityDependency] + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. - :type user_properties: list[~data_factory_management_client.models.UserProperty] + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] :param method: Required. Rest API method for target endpoint. Possible values include: "POST". - :type method: str or ~data_factory_management_client.models.WebHookActivityMethod + :type method: str or ~azure.mgmt.datafactory.models.WebHookActivityMethod :param url: Required. WebHook activity target endpoint and path. Type: string (or Expression with resultType string). :type url: object @@ -40702,7 +42198,7 @@ class WebHookActivity(Activity): method, not allowed for GET method Type: string (or Expression with resultType string). :type body: object :param authentication: Authentication method used for calling the endpoint. - :type authentication: ~data_factory_management_client.models.WebActivityAuthentication + :type authentication: ~azure.mgmt.datafactory.models.WebActivityAuthentication :param report_status_on_call_back: When set to true, statusCode, output and error in callback request body will be consumed by activity. The activity can be marked as failed by setting statusCode >= 400 in callback request. Default is false. Type: boolean (or Expression with @@ -40772,15 +42268,15 @@ class WebLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param type_properties: Required. Web linked service properties. 
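For the Web linked-service family above, the `authentication_type` discriminator is constant-filled by the concrete subclass, so callers only choose the subclass. A minimal sketch with a placeholder endpoint and secret:

```python
from azure.mgmt.datafactory import models

web_ls = models.WebLinkedService(
    type_properties=models.WebBasicAuthentication(
        url="https://example.contoso.com/api",  # authenticationType=Basic is implied
        username="apiuser",
        password=models.SecureString(value="<placeholder>"),
    ),
)
```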
- :type type_properties: ~data_factory_management_client.models.WebLinkedServiceTypeProperties + :type type_properties: ~azure.mgmt.datafactory.models.WebLinkedServiceTypeProperties """ _validation = { @@ -40833,9 +42329,12 @@ class WebSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object """ _validation = { @@ -40848,7 +42347,8 @@ class WebSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -40858,10 +42358,11 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + disable_metrics_collection: Optional[object] = None, + additional_columns: Optional[object] = None, **kwargs ): - super(WebSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(WebSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'WebSource' # type: str self.additional_columns = additional_columns @@ -40885,14 +42386,14 @@ class WebTableDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param index: Required. 
The zero-based index of the table in the web page. Type: integer (or Expression with resultType integer), minimum: 0. :type index: object @@ -40953,11 +42454,11 @@ class XeroLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param connection_properties: Properties used to connect to Xero. It is mutually exclusive with @@ -40966,11 +42467,11 @@ class XeroLinkedService(LinkedService): :param host: The endpoint of the Xero server. (i.e. api.xero.com). :type host: object :param consumer_key: The consumer key associated with the Xero application. - :type consumer_key: ~data_factory_management_client.models.SecretBase + :type consumer_key: ~azure.mgmt.datafactory.models.SecretBase :param private_key: The private key from the .pem file that was generated for your Xero private application. You must include all the text from the .pem file, including the Unix line endings( ). - :type private_key: ~data_factory_management_client.models.SecretBase + :type private_key: ~azure.mgmt.datafactory.models.SecretBase :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. :type use_encrypted_endpoints: object @@ -41057,14 +42558,14 @@ class XeroObjectDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The table name. Type: string (or Expression with resultType string). :type table_name: object """ @@ -41125,12 +42626,15 @@ class XeroSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. 
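An illustrative counterpart for the Xero hunks above: both secrets stay `SecretBase`-typed (a `SecureString` here; an `AzureKeyVaultSecretReference` would also fit), while the host and verification flags are plain passthrough objects:

```python
from azure.mgmt.datafactory import models

xero_ls = models.XeroLinkedService(
    host="api.xero.com",
    consumer_key=models.SecureString(value="<consumer-key>"),
    # Must contain the full .pem text, including the Unix line endings.
    private_key=models.SecureString(value="<pem-contents>"),
    use_encrypted_endpoints=True,
)
```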
Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -41146,8 +42650,9 @@ class XeroSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -41158,12 +42663,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, query: Optional[object] = None, **kwargs ): - super(XeroSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(XeroSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'XeroSource' # type: str self.query = query @@ -41187,16 +42693,16 @@ class XmlDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param location: The location of the json data storage. 
- :type location: ~data_factory_management_client.models.DatasetLocation + :type location: ~azure.mgmt.datafactory.models.DatasetLocation :param encoding_name: The code page name of the preferred encoding. If not specified, the default value is UTF-8, unless BOM denotes another Unicode encoding. Refer to the name column of the table in the following link to set supported values: @@ -41206,7 +42712,7 @@ class XmlDataset(Dataset): :param null_value: The null value string. Type: string (or Expression with resultType string). :type null_value: object :param compression: The data compression method used for the json dataset. - :type compression: ~data_factory_management_client.models.DatasetCompression + :type compression: ~azure.mgmt.datafactory.models.DatasetCompression """ _validation = { @@ -41266,7 +42772,7 @@ class XmlReadSettings(FormatReadSettings): :param type: Required. The read setting type.Constant filled by server. :type type: str :param compression_properties: Compression settings. - :type compression_properties: ~data_factory_management_client.models.CompressionReadSettings + :type compression_properties: ~azure.mgmt.datafactory.models.CompressionReadSettings :param validation_mode: Indicates what validation method is used when reading the xml files. Allowed values: 'none', 'xsd', or 'dtd'. Type: string (or Expression with resultType string). :type validation_mode: object @@ -41336,13 +42842,16 @@ class XmlSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param store_settings: Xml store settings. - :type store_settings: ~data_factory_management_client.models.StoreReadSettings + :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings :param format_settings: Xml format settings. - :type format_settings: ~data_factory_management_client.models.XmlReadSettings + :type format_settings: ~azure.mgmt.datafactory.models.XmlReadSettings :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
+ :type additional_columns: object """ _validation = { @@ -41355,9 +42864,10 @@ class XmlSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, 'format_settings': {'key': 'formatSettings', 'type': 'XmlReadSettings'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -41367,12 +42877,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, store_settings: Optional["StoreReadSettings"] = None, format_settings: Optional["XmlReadSettings"] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, **kwargs ): - super(XmlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(XmlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'XmlSource' # type: str self.store_settings = store_settings self.format_settings = format_settings @@ -41427,11 +42938,11 @@ class ZohoLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param connection_properties: Properties used to connect to Zoho. It is mutually exclusive with @@ -41440,7 +42951,7 @@ class ZohoLinkedService(LinkedService): :param endpoint: The endpoint of the Zoho server. (i.e. crm.zoho.com/crm/private). :type endpoint: object :param access_token: The access token for Zoho authentication. - :type access_token: ~data_factory_management_client.models.SecretBase + :type access_token: ~azure.mgmt.datafactory.models.SecretBase :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. :type use_encrypted_endpoints: object @@ -41524,14 +43035,14 @@ class ZohoObjectDataset(Dataset): Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. 
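A hedged `XmlSource` sketch tying together the settings objects documented above; the blob read-settings subclass is an assumption for illustration, and any `StoreReadSettings` would do:

```python
from azure.mgmt.datafactory import models

xml_source = models.XmlSource(
    store_settings=models.AzureBlobStorageReadSettings(  # assumed store type
        recursive=True,
        wildcard_file_name="*.xml",
    ),
    format_settings=models.XmlReadSettings(validation_mode="xsd"),  # 'none', 'xsd', or 'dtd'
    disable_metrics_collection=False,
    additional_columns=[{"name": "source_file", "value": "$$FILEPATH"}],
)
```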
- :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~data_factory_management_client.models.DatasetFolder + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The table name. Type: string (or Expression with resultType string). :type table_name: object """ @@ -41592,12 +43103,15 @@ class ZohoSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
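The same pattern holds for Zoho; since `connection_properties` is mutually exclusive with the individual fields, only the field form is shown, with a placeholder token:

```python
from azure.mgmt.datafactory import models

zoho_ls = models.ZohoLinkedService(
    endpoint="crm.zoho.com/crm/private",
    access_token=models.SecureString(value="<access-token>"),
    use_encrypted_endpoints=True,
)
```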
:type query: object @@ -41613,8 +43127,9 @@ class ZohoSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -41625,11 +43140,12 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, query: Optional[object] = None, **kwargs ): - super(ZohoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(ZohoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'ZohoSource' # type: str self.query = query diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_activity_run_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_activity_run_operations.py deleted file mode 100644 index 192e09232ad..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_activity_run_operations.py +++ /dev/null @@ -1,132 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -import datetime -from typing import TYPE_CHECKING -import warnings - -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse -from azure.mgmt.core.exceptions import ARMErrorFormat - -from .. import models - -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar - - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] - -class ActivityRunOperations(object): - """ActivityRunOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. 
- - :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = models - - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - def query_by_pipeline_run( - self, - resource_group_name, # type: str - factory_name, # type: str - run_id, # type: str - last_updated_after, # type: datetime.datetime - last_updated_before, # type: datetime.datetime - continuation_token_parameter=None, # type: Optional[str] - filters=None, # type: Optional[List["models.RunQueryFilter"]] - order_by=None, # type: Optional[List["models.RunQueryOrderBy"]] - **kwargs # type: Any - ): - # type: (...) -> "models.ActivityRunsQueryResponse" - """Query activity runs based on input filter conditions. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param run_id: The pipeline run identifier. - :type run_id: str - :param last_updated_after: The time at or after which the run event was updated in 'ISO 8601' - format. - :type last_updated_after: ~datetime.datetime - :param last_updated_before: The time at or before which the run event was updated in 'ISO 8601' - format. - :type last_updated_before: ~datetime.datetime - :param continuation_token_parameter: The continuation token for getting the next page of - results. Null for first page. - :type continuation_token_parameter: str - :param filters: List of filters. - :type filters: list[~data_factory_management_client.models.RunQueryFilter] - :param order_by: List of OrderBy option. 
- :type order_by: list[~data_factory_management_client.models.RunQueryOrderBy] - :keyword callable cls: A custom type or function that will be passed the direct response - :return: ActivityRunsQueryResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.ActivityRunsQueryResponse - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.ActivityRunsQueryResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - filter_parameters = models.RunFilterParameters(continuation_token=continuation_token_parameter, last_updated_after=last_updated_after, last_updated_before=last_updated_before, filters=filters, order_by=order_by) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.query_by_pipeline_run.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'runId': self._serialize.url("run_id", run_id, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(filter_parameters, 'RunFilterParameters') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('ActivityRunsQueryResponse', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - query_by_pipeline_run.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelineruns/{runId}/queryActivityruns'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_activity_runs_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_activity_runs_operations.py index f51ff306dc7..9585a0a97f6 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_activity_runs_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_activity_runs_operations.py @@ -29,7 +29,7 @@ class ActivityRunsOperations(object): instantiates it for you and attaches it as an attribute. 
:ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models + :type models: ~azure.mgmt.datafactory.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. @@ -62,10 +62,10 @@ def query_by_pipeline_run( :param run_id: The pipeline run identifier. :type run_id: str :param filter_parameters: Parameters to filter the activity runs. - :type filter_parameters: ~data_factory_management_client.models.RunFilterParameters + :type filter_parameters: ~azure.mgmt.datafactory.models.RunFilterParameters :keyword callable cls: A custom type or function that will be passed the direct response :return: ActivityRunsQueryResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.ActivityRunsQueryResponse + :rtype: ~azure.mgmt.datafactory.models.ActivityRunsQueryResponse :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.ActivityRunsQueryResponse"] diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_data_flow_debug_session_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_data_flow_debug_session_operations.py index 976a9653c6e..27e9c3c9bd2 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_data_flow_debug_session_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_data_flow_debug_session_operations.py @@ -32,7 +32,7 @@ class DataFlowDebugSessionOperations(object): instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models + :type models: ~azure.mgmt.datafactory.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. @@ -122,7 +122,7 @@ def begin_create( :param factory_name: The factory name. :type factory_name: str :param request: Data flow debug session definition. - :type request: ~data_factory_management_client.models.CreateDataFlowDebugSessionRequest + :type request: ~azure.mgmt.datafactory.models.CreateDataFlowDebugSessionRequest :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: True for ARMPolling, False for no polling, or a @@ -130,7 +130,7 @@ def begin_create( :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
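The deleted `_activity_run_operations.py` flattened the filter arguments and built `RunFilterParameters` internally; the surviving `activity_runs` group takes the model directly. A hedged sketch of the migrated call, assuming `azure-identity` for auth; subscription, resource names, and the run id are placeholders:

```python
import datetime

from azure.identity import DefaultAzureCredential
from azure.mgmt.datafactory import DataFactoryManagementClient, models

client = DataFactoryManagementClient(DefaultAzureCredential(), "<subscription-id>")

result = client.activity_runs.query_by_pipeline_run(
    "exampleResourceGroup",
    "exampleFactoryName",
    "<pipeline-run-id>",
    models.RunFilterParameters(
        last_updated_after=datetime.datetime(2021, 6, 1),
        last_updated_before=datetime.datetime(2021, 6, 2),
        filters=[models.RunQueryFilter(
            operand="ActivityName", operator="Equals", values=["exampleCopyActivity"],
        )],
    ),
)
for run in result.value:
    print(run.activity_name, run.status)
```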
:return: An instance of LROPoller that returns either CreateDataFlowDebugSessionResponse or the result of cls(response) - :rtype: ~azure.core.polling.LROPoller[~data_factory_management_client.models.CreateDataFlowDebugSessionResponse] + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.datafactory.models.CreateDataFlowDebugSessionResponse] :raises ~azure.core.exceptions.HttpResponseError: """ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] @@ -194,7 +194,7 @@ def query_by_factory( :type factory_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either QueryDataFlowDebugSessionsResponse or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~data_factory_management_client.models.QueryDataFlowDebugSessionsResponse] + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.datafactory.models.QueryDataFlowDebugSessionsResponse] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.QueryDataFlowDebugSessionsResponse"] @@ -269,10 +269,10 @@ def add_data_flow( :param factory_name: The factory name. :type factory_name: str :param request: Data flow debug session definition with debug content. - :type request: ~data_factory_management_client.models.DataFlowDebugPackage + :type request: ~azure.mgmt.datafactory.models.DataFlowDebugPackage :keyword callable cls: A custom type or function that will be passed the direct response :return: AddDataFlowToDebugSessionResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.AddDataFlowToDebugSessionResponse + :rtype: ~azure.mgmt.datafactory.models.AddDataFlowToDebugSessionResponse :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.AddDataFlowToDebugSessionResponse"] @@ -336,7 +336,7 @@ def delete( :param factory_name: The factory name. :type factory_name: str :param request: Data flow debug session definition for deletion. - :type request: ~data_factory_management_client.models.DeleteDataFlowDebugSessionRequest + :type request: ~azure.mgmt.datafactory.models.DeleteDataFlowDebugSessionRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: None, or the result of cls(response) :rtype: None @@ -460,7 +460,7 @@ def begin_execute_command( :param factory_name: The factory name. :type factory_name: str :param request: Data flow debug command definition. - :type request: ~data_factory_management_client.models.DataFlowDebugCommandRequest + :type request: ~azure.mgmt.datafactory.models.DataFlowDebugCommandRequest :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: True for ARMPolling, False for no polling, or a @@ -468,7 +468,7 @@ def begin_execute_command( :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
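Because `begin_create` and `begin_execute_command` are long-running operations, the debug-session id only materializes from the poller. A self-contained sketch; the compute-sizing fields on `CreateDataFlowDebugSessionRequest` are assumptions for illustration:

```python
from azure.identity import DefaultAzureCredential
from azure.mgmt.datafactory import DataFactoryManagementClient, models

client = DataFactoryManagementClient(DefaultAzureCredential(), "<subscription-id>")

poller = client.data_flow_debug_session.begin_create(
    "exampleResourceGroup",
    "exampleFactoryName",
    models.CreateDataFlowDebugSessionRequest(
        compute_type="General",
        core_count=8,
        time_to_live=60,  # minutes the session stays warm
    ),
)
print(poller.result().session_id)  # LROPoller.result() blocks until provisioned
```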
:return: An instance of LROPoller that returns either DataFlowDebugCommandResponse or the result of cls(response) - :rtype: ~azure.core.polling.LROPoller[~data_factory_management_client.models.DataFlowDebugCommandResponse] + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.datafactory.models.DataFlowDebugCommandResponse] :raises ~azure.core.exceptions.HttpResponseError: """ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_data_flow_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_data_flow_operations.py deleted file mode 100644 index e0bd3be1783..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_data_flow_operations.py +++ /dev/null @@ -1,317 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from typing import TYPE_CHECKING -import warnings - -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.paging import ItemPaged -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse -from azure.mgmt.core.exceptions import ARMErrorFormat - -from .. import models - -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar - - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] - -class DataFlowOperations(object): - """DataFlowOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = models - - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - def create_or_update( - self, - resource_group_name, # type: str - factory_name, # type: str - data_flow_name, # type: str - properties, # type: "models.DataFlow" - if_match=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> "models.DataFlowResource" - """Creates or updates a data flow. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param data_flow_name: The data flow name. - :type data_flow_name: str - :param properties: Data flow properties. - :type properties: ~data_factory_management_client.models.DataFlow - :param if_match: ETag of the data flow entity. 
Should only be specified for update, for which - it should match existing entity or can be * for unconditional update. - :type if_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: DataFlowResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.DataFlowResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.DataFlowResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - data_flow = models.DataFlowResource(properties=properties) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.create_or_update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'dataFlowName': self._serialize.url("data_flow_name", data_flow_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(data_flow, 'DataFlowResource') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('DataFlowResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/dataflows/{dataFlowName}'} # type: ignore - - def get( - self, - resource_group_name, # type: str - factory_name, # type: str - data_flow_name, # type: str - if_none_match=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> "models.DataFlowResource" - """Gets a data flow. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param data_flow_name: The data flow name. - :type data_flow_name: str - :param if_none_match: ETag of the data flow entity. Should only be specified for get. 
If the - ETag matches the existing entity tag, or if * was provided, then no content will be returned. - :type if_none_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: DataFlowResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.DataFlowResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.DataFlowResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'dataFlowName': self._serialize.url("data_flow_name", data_flow_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('DataFlowResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/dataflows/{dataFlowName}'} # type: ignore - - def delete( - self, - resource_group_name, # type: str - factory_name, # type: str - data_flow_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> None - """Deletes a data flow. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param data_flow_name: The data flow name. 
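The `if_match`/`if_none_match` docstrings above encode the usual ETag handshake, sketched here against the surviving plural `data_flows` group and assuming the resource's `etag` is surfaced via `SubResource`:

```python
from azure.identity import DefaultAzureCredential
from azure.mgmt.datafactory import DataFactoryManagementClient, models

client = DataFactoryManagementClient(DefaultAzureCredential(), "<subscription-id>")

current = client.data_flows.get("exampleResourceGroup", "exampleFactoryName", "exampleDataFlow")
client.data_flows.create_or_update(
    "exampleResourceGroup",
    "exampleFactoryName",
    "exampleDataFlow",
    models.DataFlowResource(properties=current.properties),
    if_match=current.etag,  # pass "*" instead for an unconditional update
)
```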
- :type data_flow_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'dataFlowName': self._serialize.url("data_flow_name", data_flow_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.delete(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 204]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/dataflows/{dataFlowName}'} # type: ignore - - def list_by_factory( - self, - resource_group_name, # type: str - factory_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> Iterable["models.DataFlowListResponse"] - """Lists data flows. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. 
- :type factory_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either DataFlowListResponse or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~data_factory_management_client.models.DataFlowListResponse] - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.DataFlowListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - if not next_link: - # Construct URL - url = self.list_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) - return request - - def extract_data(pipeline_response): - deserialized = self._deserialize('DataFlowListResponse', pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) - return deserialized.next_link or None, iter(list_of_elem) - - def get_next(next_link=None): - request = prepare_request(next_link) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return ItemPaged( - get_next, extract_data - ) - list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/dataflows'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_data_flows_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_data_flows_operations.py index 41292015b17..4ddb7d1f44a 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_data_flows_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_data_flows_operations.py @@ -30,7 +30,7 @@ class DataFlowsOperations(object): instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models + :type models: ~azure.mgmt.datafactory.models :param client: Client for service requests. :param config: Configuration of service client. 
:param serializer: An object model serializer. @@ -64,13 +64,13 @@ def create_or_update( :param data_flow_name: The data flow name. :type data_flow_name: str :param data_flow: Data flow resource definition. - :type data_flow: ~data_factory_management_client.models.DataFlowResource + :type data_flow: ~azure.mgmt.datafactory.models.DataFlowResource :param if_match: ETag of the data flow entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. :type if_match: str :keyword callable cls: A custom type or function that will be passed the direct response :return: DataFlowResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.DataFlowResource + :rtype: ~azure.mgmt.datafactory.models.DataFlowResource :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.DataFlowResource"] @@ -144,7 +144,7 @@ def get( :type if_none_match: str :keyword callable cls: A custom type or function that will be passed the direct response :return: DataFlowResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.DataFlowResource + :rtype: ~azure.mgmt.datafactory.models.DataFlowResource :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.DataFlowResource"] @@ -266,7 +266,7 @@ def list_by_factory( :type factory_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either DataFlowListResponse or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~data_factory_management_client.models.DataFlowListResponse] + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.datafactory.models.DataFlowListResponse] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.DataFlowListResponse"] diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_dataset_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_dataset_operations.py deleted file mode 100644 index 2f866416c74..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_dataset_operations.py +++ /dev/null @@ -1,319 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from typing import TYPE_CHECKING -import warnings - -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.paging import ItemPaged -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse -from azure.mgmt.core.exceptions import ARMErrorFormat - -from .. 
import models - -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar - - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] - -class DatasetOperations(object): - """DatasetOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = models - - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - def list_by_factory( - self, - resource_group_name, # type: str - factory_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> Iterable["models.DatasetListResponse"] - """Lists datasets. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either DatasetListResponse or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~data_factory_management_client.models.DatasetListResponse] - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.DatasetListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - if not next_link: - # Construct URL - url = self.list_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) - return request - - def extract_data(pipeline_response): - deserialized = self._deserialize('DatasetListResponse', pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) - return deserialized.next_link or None, iter(list_of_elem) - - def get_next(next_link=None): - request = prepare_request(next_link) - - pipeline_response = self._client._pipeline.run(request, stream=False, 
**kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return ItemPaged( - get_next, extract_data - ) - list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/datasets'} # type: ignore - - def create_or_update( - self, - resource_group_name, # type: str - factory_name, # type: str - dataset_name, # type: str - properties, # type: "models.Dataset" - if_match=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> "models.DatasetResource" - """Creates or updates a dataset. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param dataset_name: The dataset name. - :type dataset_name: str - :param properties: Dataset properties. - :type properties: ~data_factory_management_client.models.Dataset - :param if_match: ETag of the dataset entity. Should only be specified for update, for which it - should match existing entity or can be * for unconditional update. - :type if_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: DatasetResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.DatasetResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.DatasetResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - dataset = models.DatasetResource(properties=properties) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.create_or_update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'datasetName': self._serialize.url("dataset_name", dataset_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(dataset, 'DatasetResource') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - 
response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('DatasetResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/datasets/{datasetName}'} # type: ignore - - def get( - self, - resource_group_name, # type: str - factory_name, # type: str - dataset_name, # type: str - if_none_match=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> Optional["models.DatasetResource"] - """Gets a dataset. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param dataset_name: The dataset name. - :type dataset_name: str - :param if_none_match: ETag of the dataset entity. Should only be specified for get. If the ETag - matches the existing entity tag, or if * was provided, then no content will be returned. - :type if_none_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: DatasetResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.DatasetResource or None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.DatasetResource"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'datasetName': self._serialize.url("dataset_name", dataset_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 304]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize('DatasetResource', pipeline_response) - - if cls: - return cls(pipeline_response, 
deserialized, {}) - - return deserialized - get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/datasets/{datasetName}'} # type: ignore - - def delete( - self, - resource_group_name, # type: str - factory_name, # type: str - dataset_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> None - """Deletes a dataset. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param dataset_name: The dataset name. - :type dataset_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'datasetName': self._serialize.url("dataset_name", dataset_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.delete(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 204]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/datasets/{datasetName}'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_datasets_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_datasets_operations.py index 3ad92c858c9..f26e8b248f7 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_datasets_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_datasets_operations.py @@ -30,7 +30,7 @@ class DatasetsOperations(object): instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models + :type models: ~azure.mgmt.datafactory.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. 
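
Note the asymmetry this deletion removes: the dropped `DatasetOperations.create_or_update` accepted flattened `properties` and wrapped them in `models.DatasetResource` itself, whereas the retained `DatasetsOperations` (updated in the hunks below) expects the caller to pass the full resource envelope. A hedged sketch of calling the retained operation group; `client` (a configured management client exposing a `datasets` group) and `dataset_properties` (a `models.Dataset` subtype) are assumptions here:

```python
# `client` is assumed to be a configured DataFactoryManagementClient;
# `dataset_properties` (some models.Dataset subtype) is hypothetical.
dataset = client.datasets.create_or_update(
    resource_group_name="exampleResourceGroup",
    factory_name="exampleFactoryName",
    dataset_name="exampleDataset",
    dataset=models.DatasetResource(properties=dataset_properties),
    if_match=None,  # pass the current ETag (or "*") for a conditional update
)
print(dataset.id)
```
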
@@ -60,7 +60,7 @@ def list_by_factory( :type factory_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either DatasetListResponse or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~data_factory_management_client.models.DatasetListResponse] + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.datafactory.models.DatasetListResponse] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.DatasetListResponse"] @@ -139,13 +139,13 @@ def create_or_update( :param dataset_name: The dataset name. :type dataset_name: str :param dataset: Dataset resource definition. - :type dataset: ~data_factory_management_client.models.DatasetResource + :type dataset: ~azure.mgmt.datafactory.models.DatasetResource :param if_match: ETag of the dataset entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. :type if_match: str :keyword callable cls: A custom type or function that will be passed the direct response :return: DatasetResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.DatasetResource + :rtype: ~azure.mgmt.datafactory.models.DatasetResource :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.DatasetResource"] @@ -219,7 +219,7 @@ def get( :type if_none_match: str :keyword callable cls: A custom type or function that will be passed the direct response :return: DatasetResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.DatasetResource or None + :rtype: ~azure.mgmt.datafactory.models.DatasetResource or None :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.DatasetResource"]] diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_exposure_control_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_exposure_control_operations.py index b419a713e9f..4935032e777 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_exposure_control_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_exposure_control_operations.py @@ -29,7 +29,7 @@ class ExposureControlOperations(object): instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models + :type models: ~azure.mgmt.datafactory.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. @@ -56,10 +56,10 @@ def get_feature_value( :param location_id: The location identifier. :type location_id: str :param exposure_control_request: The exposure control request. 
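
The exposure-control hunks here again only retarget docstring namespaces. For orientation, `get_feature_value_by_factory` answers whether a named feature flag is enabled for a factory; a sketch follows, where the request field names (`feature_name`, `feature_type`) and the `exposure_control` attribute name are stated as assumptions rather than verified API:

```python
# Field names on ExposureControlRequest are assumptions for illustration.
request = models.ExposureControlRequest(
    feature_name="ADFIntegrationRuntimeSharingRbac",
    feature_type="Feature",
)
result = client.exposure_control.get_feature_value_by_factory(
    resource_group_name="exampleResourceGroup",
    factory_name="exampleFactoryName",
    exposure_control_request=request,
)
print(result.feature_name, result.value)  # response fields also assumed
```
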
- :type exposure_control_request: ~data_factory_management_client.models.ExposureControlRequest + :type exposure_control_request: ~azure.mgmt.datafactory.models.ExposureControlRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: ExposureControlResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.ExposureControlResponse + :rtype: ~azure.mgmt.datafactory.models.ExposureControlResponse :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.ExposureControlResponse"] @@ -122,10 +122,10 @@ def get_feature_value_by_factory( :param factory_name: The factory name. :type factory_name: str :param exposure_control_request: The exposure control request. - :type exposure_control_request: ~data_factory_management_client.models.ExposureControlRequest + :type exposure_control_request: ~azure.mgmt.datafactory.models.ExposureControlRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: ExposureControlResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.ExposureControlResponse + :rtype: ~azure.mgmt.datafactory.models.ExposureControlResponse :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.ExposureControlResponse"] @@ -189,10 +189,10 @@ def query_feature_values_by_factory( :param factory_name: The factory name. :type factory_name: str :param exposure_control_batch_request: The exposure control request for list of features. - :type exposure_control_batch_request: ~data_factory_management_client.models.ExposureControlBatchRequest + :type exposure_control_batch_request: ~azure.mgmt.datafactory.models.ExposureControlBatchRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: ExposureControlBatchResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.ExposureControlBatchResponse + :rtype: ~azure.mgmt.datafactory.models.ExposureControlBatchResponse :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.ExposureControlBatchResponse"] diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_factories_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_factories_operations.py index 29d7d4af8a9..bce9885ad55 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_factories_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_factories_operations.py @@ -30,7 +30,7 @@ class FactoriesOperations(object): instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models + :type models: ~azure.mgmt.datafactory.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. 
@@ -54,7 +54,7 @@ def list( :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either FactoryListResponse or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~data_factory_management_client.models.FactoryListResponse] + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.datafactory.models.FactoryListResponse] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.FactoryListResponse"] @@ -124,10 +124,10 @@ def configure_factory_repo( :param location_id: The location identifier. :type location_id: str :param factory_repo_update: Update factory repo request definition. - :type factory_repo_update: ~data_factory_management_client.models.FactoryRepoUpdate + :type factory_repo_update: ~azure.mgmt.datafactory.models.FactoryRepoUpdate :keyword callable cls: A custom type or function that will be passed the direct response :return: Factory, or the result of cls(response) - :rtype: ~data_factory_management_client.models.Factory + :rtype: ~azure.mgmt.datafactory.models.Factory :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.Factory"] @@ -187,7 +187,7 @@ def list_by_resource_group( :type resource_group_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either FactoryListResponse or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~data_factory_management_client.models.FactoryListResponse] + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.datafactory.models.FactoryListResponse] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.FactoryListResponse"] @@ -262,13 +262,13 @@ def create_or_update( :param factory_name: The factory name. :type factory_name: str :param factory: Factory resource definition. - :type factory: ~data_factory_management_client.models.Factory + :type factory: ~azure.mgmt.datafactory.models.Factory :param if_match: ETag of the factory entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. :type if_match: str :keyword callable cls: A custom type or function that will be passed the direct response :return: Factory, or the result of cls(response) - :rtype: ~data_factory_management_client.models.Factory + :rtype: ~azure.mgmt.datafactory.models.Factory :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.Factory"] @@ -334,10 +334,10 @@ def update( :param factory_name: The factory name. :type factory_name: str :param factory_update_parameters: The parameters for updating a factory. 
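
As with datasets, the retained `FactoriesOperations` methods take whole model objects (`Factory`, `FactoryUpdateParameters`) where the deleted singular `FactoryOperations` group further below flattened them into keyword arguments. A hedged create-then-update sketch against the retained API, assuming a configured `client`:

```python
# `client` is assumed to be a configured DataFactoryManagementClient.
factory = client.factories.create_or_update(
    resource_group_name="exampleResourceGroup",
    factory_name="exampleFactoryName",
    factory=models.Factory(location="East US"),
    if_match=None,  # only set for a conditional update of an existing factory
)

# PATCH-style update: only tags/identity are updatable through this call.
factory = client.factories.update(
    resource_group_name="exampleResourceGroup",
    factory_name="exampleFactoryName",
    factory_update_parameters=models.FactoryUpdateParameters(
        tags={"env": "test"},
    ),
)
```
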
- :type factory_update_parameters: ~data_factory_management_client.models.FactoryUpdateParameters + :type factory_update_parameters: ~azure.mgmt.datafactory.models.FactoryUpdateParameters :keyword callable cls: A custom type or function that will be passed the direct response :return: Factory, or the result of cls(response) - :rtype: ~data_factory_management_client.models.Factory + :rtype: ~azure.mgmt.datafactory.models.Factory :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.Factory"] @@ -405,7 +405,7 @@ def get( :type if_none_match: str :keyword callable cls: A custom type or function that will be passed the direct response :return: Factory, or the result of cls(response) - :rtype: ~data_factory_management_client.models.Factory or None + :rtype: ~azure.mgmt.datafactory.models.Factory or None :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.Factory"]] @@ -524,10 +524,10 @@ def get_git_hub_access_token( :param factory_name: The factory name. :type factory_name: str :param git_hub_access_token_request: Get GitHub access token request definition. - :type git_hub_access_token_request: ~data_factory_management_client.models.GitHubAccessTokenRequest + :type git_hub_access_token_request: ~azure.mgmt.datafactory.models.GitHubAccessTokenRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: GitHubAccessTokenResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.GitHubAccessTokenResponse + :rtype: ~azure.mgmt.datafactory.models.GitHubAccessTokenResponse :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.GitHubAccessTokenResponse"] @@ -591,10 +591,10 @@ def get_data_plane_access( :param factory_name: The factory name. :type factory_name: str :param policy: Data Plane user access policy definition. - :type policy: ~data_factory_management_client.models.UserAccessPolicy + :type policy: ~azure.mgmt.datafactory.models.UserAccessPolicy :keyword callable cls: A custom type or function that will be passed the direct response :return: AccessPolicyResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.AccessPolicyResponse + :rtype: ~azure.mgmt.datafactory.models.AccessPolicyResponse :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.AccessPolicyResponse"] diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_factory_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_factory_operations.py deleted file mode 100644 index 5b8622e97f9..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_factory_operations.py +++ /dev/null @@ -1,671 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
-# -------------------------------------------------------------------------- -from typing import TYPE_CHECKING -import warnings - -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.paging import ItemPaged -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse -from azure.mgmt.core.exceptions import ARMErrorFormat - -from .. import models - -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar - - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] - -class FactoryOperations(object): - """FactoryOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = models - - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - def list( - self, - **kwargs # type: Any - ): - # type: (...) -> Iterable["models.FactoryListResponse"] - """Lists factories under the specified subscription. - - :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either FactoryListResponse or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~data_factory_management_client.models.FactoryListResponse] - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.FactoryListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - if not next_link: - # Construct URL - url = self.list.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) - return request - - def extract_data(pipeline_response): - deserialized = self._deserialize('FactoryListResponse', pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) - return deserialized.next_link or None, iter(list_of_elem) - - def get_next(next_link=None): - request = prepare_request(next_link) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = 
pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return ItemPaged( - get_next, extract_data - ) - list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.DataFactory/factories'} # type: ignore - - def configure_factory_repo( - self, - location_id, # type: str - factory_resource_id=None, # type: Optional[str] - repo_configuration=None, # type: Optional["models.FactoryRepoConfiguration"] - **kwargs # type: Any - ): - # type: (...) -> "models.Factory" - """Updates a factory's repo information. - - :param location_id: The location identifier. - :type location_id: str - :param factory_resource_id: The factory resource id. - :type factory_resource_id: str - :param repo_configuration: Git repo information of the factory. - :type repo_configuration: ~data_factory_management_client.models.FactoryRepoConfiguration - :keyword callable cls: A custom type or function that will be passed the direct response - :return: Factory, or the result of cls(response) - :rtype: ~data_factory_management_client.models.Factory - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Factory"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - factory_repo_update = models.FactoryRepoUpdate(factory_resource_id=factory_resource_id, repo_configuration=repo_configuration) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.configure_factory_repo.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'locationId': self._serialize.url("location_id", location_id, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(factory_repo_update, 'FactoryRepoUpdate') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('Factory', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - configure_factory_repo.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.DataFactory/locations/{locationId}/configureFactoryRepo'} # type: ignore - - def list_by_resource_group( - self, - resource_group_name, # type: str - **kwargs # type: Any - ): - # type: (...) 
-> Iterable["models.FactoryListResponse"] - """Lists factories. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either FactoryListResponse or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~data_factory_management_client.models.FactoryListResponse] - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.FactoryListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - if not next_link: - # Construct URL - url = self.list_by_resource_group.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) - return request - - def extract_data(pipeline_response): - deserialized = self._deserialize('FactoryListResponse', pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) - return deserialized.next_link or None, iter(list_of_elem) - - def get_next(next_link=None): - request = prepare_request(next_link) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return ItemPaged( - get_next, extract_data - ) - list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories'} # type: ignore - - def create_or_update( - self, - resource_group_name, # type: str - factory_name, # type: str - if_match=None, # type: Optional[str] - location=None, # type: Optional[str] - tags=None, # type: Optional[Dict[str, str]] - identity=None, # type: Optional["models.FactoryIdentity"] - repo_configuration=None, # type: Optional["models.FactoryRepoConfiguration"] - global_parameters=None, # type: Optional[Dict[str, "models.GlobalParameterSpecification"]] - **kwargs # type: Any - ): - # type: (...) -> "models.Factory" - """Creates or updates a factory. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param if_match: ETag of the factory entity. Should only be specified for update, for which it - should match existing entity or can be * for unconditional update. 
- :type if_match: str - :param location: The resource location. - :type location: str - :param tags: The resource tags. - :type tags: dict[str, str] - :param identity: Managed service identity of the factory. - :type identity: ~data_factory_management_client.models.FactoryIdentity - :param repo_configuration: Git repo information of the factory. - :type repo_configuration: ~data_factory_management_client.models.FactoryRepoConfiguration - :param global_parameters: List of parameters for factory. - :type global_parameters: dict[str, ~data_factory_management_client.models.GlobalParameterSpecification] - :keyword callable cls: A custom type or function that will be passed the direct response - :return: Factory, or the result of cls(response) - :rtype: ~data_factory_management_client.models.Factory - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Factory"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - factory = models.Factory(location=location, tags=tags, identity=identity, repo_configuration=repo_configuration, global_parameters=global_parameters) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.create_or_update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(factory, 'Factory') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('Factory', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}'} # type: ignore - - def update( - self, - resource_group_name, # type: str - factory_name, # type: str - tags=None, # type: Optional[Dict[str, str]] - identity=None, # type: Optional["models.FactoryIdentity"] - **kwargs # type: Any - ): - # type: (...) 
-> "models.Factory" - """Updates a factory. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param tags: The resource tags. - :type tags: dict[str, str] - :param identity: Managed service identity of the factory. - :type identity: ~data_factory_management_client.models.FactoryIdentity - :keyword callable cls: A custom type or function that will be passed the direct response - :return: Factory, or the result of cls(response) - :rtype: ~data_factory_management_client.models.Factory - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Factory"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - factory_update_parameters = models.FactoryUpdateParameters(tags=tags, identity=identity) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(factory_update_parameters, 'FactoryUpdateParameters') - body_content_kwargs['content'] = body_content - request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('Factory', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}'} # type: ignore - - def get( - self, - resource_group_name, # type: str - factory_name, # type: str - if_none_match=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> Optional["models.Factory"] - """Gets a factory. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param if_none_match: ETag of the factory entity. Should only be specified for get. If the ETag - matches the existing entity tag, or if * was provided, then no content will be returned. 
- :type if_none_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: Factory, or the result of cls(response) - :rtype: ~data_factory_management_client.models.Factory or None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.Factory"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 304]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize('Factory', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}'} # type: ignore - - def delete( - self, - resource_group_name, # type: str - factory_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> None - """Deletes a factory. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. 
- :type factory_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.delete(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 204]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}'} # type: ignore - - def get_git_hub_access_token( - self, - resource_group_name, # type: str - factory_name, # type: str - git_hub_access_code, # type: str - git_hub_access_token_base_url, # type: str - git_hub_client_id=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> "models.GitHubAccessTokenResponse" - """Get GitHub Access Token. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param git_hub_access_code: GitHub access code. - :type git_hub_access_code: str - :param git_hub_access_token_base_url: GitHub access token base URL. - :type git_hub_access_token_base_url: str - :param git_hub_client_id: GitHub application client ID. 
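
The deleted wrapper above accepts flattened arguments and assembles the request model itself; the retained `FactoriesOperations.get_git_hub_access_token` expects the caller to build the `GitHubAccessTokenRequest` (whose keyword arguments appear verbatim in the deleted body below). A sketch with placeholder values:

```python
token_request = models.GitHubAccessTokenRequest(
    git_hub_access_code="some-oauth-code",            # placeholder
    git_hub_client_id="some-client-id",               # placeholder, optional
    git_hub_access_token_base_url="https://github.mydomain.com",
)
response = client.factories.get_git_hub_access_token(  # `client` assumed
    resource_group_name="exampleResourceGroup",
    factory_name="exampleFactoryName",
    git_hub_access_token_request=token_request,
)
```
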
- :type git_hub_client_id: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: GitHubAccessTokenResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.GitHubAccessTokenResponse - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.GitHubAccessTokenResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - git_hub_access_token_request = models.GitHubAccessTokenRequest(git_hub_access_code=git_hub_access_code, git_hub_client_id=git_hub_client_id, git_hub_access_token_base_url=git_hub_access_token_base_url) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.get_git_hub_access_token.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(git_hub_access_token_request, 'GitHubAccessTokenRequest') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('GitHubAccessTokenResponse', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get_git_hub_access_token.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/getGitHubAccessToken'} # type: ignore - - def get_data_plane_access( - self, - resource_group_name, # type: str - factory_name, # type: str - permissions=None, # type: Optional[str] - access_resource_path=None, # type: Optional[str] - profile_name=None, # type: Optional[str] - start_time=None, # type: Optional[str] - expire_time=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> "models.AccessPolicyResponse" - """Get Data Plane access. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param permissions: The string with permissions for Data Plane access. Currently only 'r' is - supported which grants read only access. 
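
Per the surrounding docstring, only read permission (`'r'`) and the factory-level resource path (empty string) are currently supported, and tokens last at most eight hours. A sketch of requesting data-plane access through the retained `FactoriesOperations.get_data_plane_access`; the `UserAccessPolicy` fields come from the deleted body below, while the response attribute is an assumption:

```python
policy = models.UserAccessPolicy(
    permissions="r",          # read-only is the only supported permission
    access_resource_path="",  # empty string == the factory itself
    profile_name="DefaultProfile",
    expire_time=None,         # default: token expires after eight hours
)
access = client.factories.get_data_plane_access(  # `client` assumed
    resource_group_name="exampleResourceGroup",
    factory_name="exampleFactoryName",
    policy=policy,
)
print(access.access_token)  # attribute name assumed from AccessPolicyResponse
```
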
- :type permissions: str - :param access_resource_path: The resource path to get access relative to factory. Currently - only empty string is supported which corresponds to the factory resource. - :type access_resource_path: str - :param profile_name: The name of the profile. Currently only the default is supported. The - default value is DefaultProfile. - :type profile_name: str - :param start_time: Start time for the token. If not specified the current time will be used. - :type start_time: str - :param expire_time: Expiration time for the token. Maximum duration for the token is eight - hours and by default the token will expire in eight hours. - :type expire_time: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: AccessPolicyResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.AccessPolicyResponse - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.AccessPolicyResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - policy = models.UserAccessPolicy(permissions=permissions, access_resource_path=access_resource_path, profile_name=profile_name, start_time=start_time, expire_time=expire_time) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.get_data_plane_access.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(policy, 'UserAccessPolicy') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('AccessPolicyResponse', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get_data_plane_access.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/getDataPlaneAccess'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_node_operations.py 
b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_node_operations.py deleted file mode 100644 index a7903633080..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_node_operations.py +++ /dev/null @@ -1,309 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from typing import TYPE_CHECKING -import warnings - -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse -from azure.mgmt.core.exceptions import ARMErrorFormat - -from .. import models - -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, Optional, TypeVar - - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] - -class IntegrationRuntimeNodeOperations(object): - """IntegrationRuntimeNodeOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = models - - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - def get( - self, - resource_group_name, # type: str - factory_name, # type: str - integration_runtime_name, # type: str - node_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> "models.SelfHostedIntegrationRuntimeNode" - """Gets a self-hosted integration runtime node. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. - :type integration_runtime_name: str - :param node_name: The integration runtime node name. 
- :type node_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: SelfHostedIntegrationRuntimeNode, or the result of cls(response) - :rtype: ~data_factory_management_client.models.SelfHostedIntegrationRuntimeNode - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.SelfHostedIntegrationRuntimeNode"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'nodeName': self._serialize.url("node_name", node_name, 'str', max_length=150, min_length=1, pattern=r'^[a-z0-9A-Z][a-z0-9A-Z_-]{0,149}$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('SelfHostedIntegrationRuntimeNode', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/nodes/{nodeName}'} # type: ignore - - def delete( - self, - resource_group_name, # type: str - factory_name, # type: str - integration_runtime_name, # type: str - node_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> None - """Deletes a self-hosted integration runtime node. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. - :type integration_runtime_name: str - :param node_name: The integration runtime node name. 
- :type node_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'nodeName': self._serialize.url("node_name", node_name, 'str', max_length=150, min_length=1, pattern=r'^[a-z0-9A-Z][a-z0-9A-Z_-]{0,149}$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.delete(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 204]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/nodes/{nodeName}'} # type: ignore - - def update( - self, - resource_group_name, # type: str - factory_name, # type: str - integration_runtime_name, # type: str - node_name, # type: str - concurrent_jobs_limit=None, # type: Optional[int] - **kwargs # type: Any - ): - # type: (...) -> "models.SelfHostedIntegrationRuntimeNode" - """Updates a self-hosted integration runtime node. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. - :type integration_runtime_name: str - :param node_name: The integration runtime node name. - :type node_name: str - :param concurrent_jobs_limit: The number of concurrent jobs permitted to run on the integration - runtime node. Values between 1 and maxConcurrentJobs(inclusive) are allowed. 
- :type concurrent_jobs_limit: int - :keyword callable cls: A custom type or function that will be passed the direct response - :return: SelfHostedIntegrationRuntimeNode, or the result of cls(response) - :rtype: ~data_factory_management_client.models.SelfHostedIntegrationRuntimeNode - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.SelfHostedIntegrationRuntimeNode"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - update_integration_runtime_node_request = models.UpdateIntegrationRuntimeNodeRequest(concurrent_jobs_limit=concurrent_jobs_limit) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'nodeName': self._serialize.url("node_name", node_name, 'str', max_length=150, min_length=1, pattern=r'^[a-z0-9A-Z][a-z0-9A-Z_-]{0,149}$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(update_integration_runtime_node_request, 'UpdateIntegrationRuntimeNodeRequest') - body_content_kwargs['content'] = body_content - request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('SelfHostedIntegrationRuntimeNode', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/nodes/{nodeName}'} # type: ignore - - def get_ip_address( - self, - resource_group_name, # type: str - factory_name, # type: str - integration_runtime_name, # type: str - node_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> "models.IntegrationRuntimeNodeIpAddress" - """Get the IP address of self-hosted integration runtime node. - - :param resource_group_name: The resource group name. 
- :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. - :type integration_runtime_name: str - :param node_name: The integration runtime node name. - :type node_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: IntegrationRuntimeNodeIpAddress, or the result of cls(response) - :rtype: ~data_factory_management_client.models.IntegrationRuntimeNodeIpAddress - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeNodeIpAddress"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.get_ip_address.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'nodeName': self._serialize.url("node_name", node_name, 'str', max_length=150, min_length=1, pattern=r'^[a-z0-9A-Z][a-z0-9A-Z_-]{0,149}$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('IntegrationRuntimeNodeIpAddress', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get_ip_address.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/nodes/{nodeName}/ipAddress'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_nodes_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_nodes_operations.py index c9623854aa9..6baf806e618 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_nodes_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_nodes_operations.py @@ -29,7 +29,7 @@ class IntegrationRuntimeNodesOperations(object): instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. 
- :type models: ~data_factory_management_client.models + :type models: ~azure.mgmt.datafactory.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. @@ -65,7 +65,7 @@ def get( :type node_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: SelfHostedIntegrationRuntimeNode, or the result of cls(response) - :rtype: ~data_factory_management_client.models.SelfHostedIntegrationRuntimeNode + :rtype: ~azure.mgmt.datafactory.models.SelfHostedIntegrationRuntimeNode :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.SelfHostedIntegrationRuntimeNode"] @@ -197,10 +197,10 @@ def update( :type node_name: str :param update_integration_runtime_node_request: The parameters for updating an integration runtime node. - :type update_integration_runtime_node_request: ~data_factory_management_client.models.UpdateIntegrationRuntimeNodeRequest + :type update_integration_runtime_node_request: ~azure.mgmt.datafactory.models.UpdateIntegrationRuntimeNodeRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: SelfHostedIntegrationRuntimeNode, or the result of cls(response) - :rtype: ~data_factory_management_client.models.SelfHostedIntegrationRuntimeNode + :rtype: ~azure.mgmt.datafactory.models.SelfHostedIntegrationRuntimeNode :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.SelfHostedIntegrationRuntimeNode"] @@ -272,7 +272,7 @@ def get_ip_address( :type node_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: IntegrationRuntimeNodeIpAddress, or the result of cls(response) - :rtype: ~data_factory_management_client.models.IntegrationRuntimeNodeIpAddress + :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeNodeIpAddress :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeNodeIpAddress"] diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_object_metadata_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_object_metadata_operations.py index a04018b467e..ee79d15a42f 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_object_metadata_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_object_metadata_operations.py @@ -31,7 +31,7 @@ class IntegrationRuntimeObjectMetadataOperations(object): instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models + :type models: ~azure.mgmt.datafactory.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. @@ -121,7 +121,7 @@ def begin_refresh( :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
:return: An instance of LROPoller that returns either SsisObjectMetadataStatusResponse or the result of cls(response) - :rtype: ~azure.core.polling.LROPoller[~data_factory_management_client.models.SsisObjectMetadataStatusResponse] + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.datafactory.models.SsisObjectMetadataStatusResponse] :raises ~azure.core.exceptions.HttpResponseError: """ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] @@ -190,10 +190,10 @@ def get( :param integration_runtime_name: The integration runtime name. :type integration_runtime_name: str :param get_metadata_request: The parameters for getting a SSIS object metadata. - :type get_metadata_request: ~data_factory_management_client.models.GetSsisObjectMetadataRequest + :type get_metadata_request: ~azure.mgmt.datafactory.models.GetSsisObjectMetadataRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: SsisObjectMetadataListResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.SsisObjectMetadataListResponse + :rtype: ~azure.mgmt.datafactory.models.SsisObjectMetadataListResponse :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.SsisObjectMetadataListResponse"] diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_operations.py deleted file mode 100644 index 1fb5fc6b30d..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_operations.py +++ /dev/null @@ -1,1198 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from typing import TYPE_CHECKING -import warnings - -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.paging import ItemPaged -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse -from azure.core.polling import LROPoller, NoPolling, PollingMethod -from azure.mgmt.core.exceptions import ARMErrorFormat -from azure.mgmt.core.polling.arm_polling import ARMPolling - -from .. import models - -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union - - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] - -class IntegrationRuntimeOperations(object): - """IntegrationRuntimeOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. 
- :param deserializer: An object model deserializer. - """ - - models = models - - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - def list_by_factory( - self, - resource_group_name, # type: str - factory_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> Iterable["models.IntegrationRuntimeListResponse"] - """Lists integration runtimes. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either IntegrationRuntimeListResponse or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~data_factory_management_client.models.IntegrationRuntimeListResponse] - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - if not next_link: - # Construct URL - url = self.list_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) - return request - - def extract_data(pipeline_response): - deserialized = self._deserialize('IntegrationRuntimeListResponse', pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) - return deserialized.next_link or None, iter(list_of_elem) - - def get_next(next_link=None): - request = prepare_request(next_link) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return ItemPaged( - get_next, extract_data - ) - list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes'} # type: ignore - - def create_or_update( - self, - resource_group_name, # type: str - factory_name, # type: str - integration_runtime_name, # type: str - properties, # type: "models.IntegrationRuntime" - 
if_match=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> "models.IntegrationRuntimeResource" - """Creates or updates an integration runtime. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. - :type integration_runtime_name: str - :param properties: Integration runtime properties. - :type properties: ~data_factory_management_client.models.IntegrationRuntime - :param if_match: ETag of the integration runtime entity. Should only be specified for update, - for which it should match existing entity or can be * for unconditional update. - :type if_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: IntegrationRuntimeResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.IntegrationRuntimeResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - integration_runtime = models.IntegrationRuntimeResource(properties=properties) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.create_or_update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(integration_runtime, 'IntegrationRuntimeResource') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('IntegrationRuntimeResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - create_or_update.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}'} # type: ignore - - def get( - self, - resource_group_name, # type: str - factory_name, # type: str - integration_runtime_name, # type: str - if_none_match=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> Optional["models.IntegrationRuntimeResource"] - """Gets an integration runtime. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. - :type integration_runtime_name: str - :param if_none_match: ETag of the integration runtime entity. Should only be specified for get. - If the ETag matches the existing entity tag, or if * was provided, then no content will be - returned. - :type if_none_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: IntegrationRuntimeResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.IntegrationRuntimeResource or None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.IntegrationRuntimeResource"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 304]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize('IntegrationRuntimeResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}'} # type: ignore - - def 
update( - self, - resource_group_name, # type: str - factory_name, # type: str - integration_runtime_name, # type: str - auto_update=None, # type: Optional[Union[str, "models.IntegrationRuntimeAutoUpdate"]] - update_delay_offset=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> "models.IntegrationRuntimeResource" - """Updates an integration runtime. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. - :type integration_runtime_name: str - :param auto_update: Enables or disables the auto-update feature of the self-hosted integration - runtime. See https://go.microsoft.com/fwlink/?linkid=854189. - :type auto_update: str or ~data_factory_management_client.models.IntegrationRuntimeAutoUpdate - :param update_delay_offset: The time offset (in hours) in the day, e.g., PT03H is 3 hours. The - integration runtime auto update will happen on that time. - :type update_delay_offset: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: IntegrationRuntimeResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.IntegrationRuntimeResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - update_integration_runtime_request = models.UpdateIntegrationRuntimeRequest(auto_update=auto_update, update_delay_offset=update_delay_offset) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(update_integration_runtime_request, 'UpdateIntegrationRuntimeRequest') - body_content_kwargs['content'] = body_content - request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise 
HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('IntegrationRuntimeResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}'} # type: ignore - - def delete( - self, - resource_group_name, # type: str - factory_name, # type: str - integration_runtime_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> None - """Deletes an integration runtime. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. - :type integration_runtime_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.delete(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 204]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}'} # type: ignore - - def get_status( - self, - resource_group_name, # type: str - factory_name, # type: str - integration_runtime_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> "models.IntegrationRuntimeStatusResponse" - """Gets detailed status information for an integration runtime. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. 
- :type integration_runtime_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: IntegrationRuntimeStatusResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.IntegrationRuntimeStatusResponse - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeStatusResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.get_status.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('IntegrationRuntimeStatusResponse', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get_status.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/getStatus'} # type: ignore - - def get_connection_info( - self, - resource_group_name, # type: str - factory_name, # type: str - integration_runtime_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> "models.IntegrationRuntimeConnectionInfo" - """Gets the on-premises integration runtime connection information for encrypting the on-premises - data source credentials. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. 
- :type integration_runtime_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: IntegrationRuntimeConnectionInfo, or the result of cls(response) - :rtype: ~data_factory_management_client.models.IntegrationRuntimeConnectionInfo - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeConnectionInfo"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.get_connection_info.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('IntegrationRuntimeConnectionInfo', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get_connection_info.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/getConnectionInfo'} # type: ignore - - def regenerate_auth_key( - self, - resource_group_name, # type: str - factory_name, # type: str - integration_runtime_name, # type: str - key_name=None, # type: Optional[Union[str, "models.IntegrationRuntimeAuthKeyName"]] - **kwargs # type: Any - ): - # type: (...) -> "models.IntegrationRuntimeAuthKeys" - """Regenerates the authentication key for an integration runtime. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. - :type integration_runtime_name: str - :param key_name: The name of the authentication key to regenerate. 
- :type key_name: str or ~data_factory_management_client.models.IntegrationRuntimeAuthKeyName - :keyword callable cls: A custom type or function that will be passed the direct response - :return: IntegrationRuntimeAuthKeys, or the result of cls(response) - :rtype: ~data_factory_management_client.models.IntegrationRuntimeAuthKeys - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeAuthKeys"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - regenerate_key_parameters = models.IntegrationRuntimeRegenerateKeyParameters(key_name=key_name) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.regenerate_auth_key.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(regenerate_key_parameters, 'IntegrationRuntimeRegenerateKeyParameters') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('IntegrationRuntimeAuthKeys', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - regenerate_auth_key.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/regenerateAuthKey'} # type: ignore - - def list_auth_key( - self, - resource_group_name, # type: str - factory_name, # type: str - integration_runtime_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> "models.IntegrationRuntimeAuthKeys" - """Retrieves the authentication keys for an integration runtime. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. 
- :type integration_runtime_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: IntegrationRuntimeAuthKeys, or the result of cls(response) - :rtype: ~data_factory_management_client.models.IntegrationRuntimeAuthKeys - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeAuthKeys"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.list_auth_key.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('IntegrationRuntimeAuthKeys', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - list_auth_key.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/listAuthKeys'} # type: ignore - - def _start_initial( - self, - resource_group_name, # type: str - factory_name, # type: str - integration_runtime_name, # type: str - **kwargs # type: Any - ): - # type: (...) 
-> Optional["models.IntegrationRuntimeStatusResponse"] - cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.IntegrationRuntimeStatusResponse"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self._start_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 202]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize('IntegrationRuntimeStatusResponse', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - _start_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/start'} # type: ignore - - def begin_start( - self, - resource_group_name, # type: str - factory_name, # type: str - integration_runtime_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> LROPoller["models.IntegrationRuntimeStatusResponse"] - """Starts a ManagedReserved type integration runtime. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. - :type integration_runtime_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
- :return: An instance of LROPoller that returns either IntegrationRuntimeStatusResponse or the result of cls(response) - :rtype: ~azure.core.polling.LROPoller[~data_factory_management_client.models.IntegrationRuntimeStatusResponse] - :raises ~azure.core.exceptions.HttpResponseError: - """ - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeStatusResponse"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] - if cont_token is None: - raw_result = self._start_initial( - resource_group_name=resource_group_name, - factory_name=factory_name, - integration_runtime_name=integration_runtime_name, - cls=lambda x,y,z: x, - **kwargs - ) - - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) - - def get_long_running_output(pipeline_response): - deserialized = self._deserialize('IntegrationRuntimeStatusResponse', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - return deserialized - - if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling - if cont_token: - return LROPoller.from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output - ) - else: - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_start.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/start'} # type: ignore - - def _stop_initial( - self, - resource_group_name, # type: str - factory_name, # type: str - integration_runtime_name, # type: str - **kwargs # type: Any - ): - # type: (...) 
-> None - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self._stop_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 202]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - _stop_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/stop'} # type: ignore - - def begin_stop( - self, - resource_group_name, # type: str - factory_name, # type: str - integration_runtime_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> LROPoller[None] - """Stops a ManagedReserved type integration runtime. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. - :type integration_runtime_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
- :return: An instance of LROPoller that returns either None or the result of cls(response) - :rtype: ~azure.core.polling.LROPoller[None] - :raises ~azure.core.exceptions.HttpResponseError: - """ - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] - if cont_token is None: - raw_result = self._stop_initial( - resource_group_name=resource_group_name, - factory_name=factory_name, - integration_runtime_name=integration_runtime_name, - cls=lambda x,y,z: x, - **kwargs - ) - - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) - - def get_long_running_output(pipeline_response): - if cls: - return cls(pipeline_response, None, {}) - - if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling - if cont_token: - return LROPoller.from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output - ) - else: - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_stop.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/stop'} # type: ignore - - def sync_credentials( - self, - resource_group_name, # type: str - factory_name, # type: str - integration_runtime_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> None - """Force the integration runtime to synchronize credentials across integration runtime nodes, and - this will override the credentials across all worker nodes with those available on the - dispatcher node. If you already have the latest credential backup file, you should manually - import it (preferred) on any self-hosted integration runtime node than using this API directly. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. 
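# ---- Editor's usage sketch (annotation, not part of the diff) ----
# begin_stop above follows the same LRO pattern; result() blocks until the
# ManagedReserved runtime is fully stopped, while polling=False would return
# right after the initial 200/202 response. Names below are placeholders.
from azure.identity import DefaultAzureCredential
from azure.mgmt.datafactory import DataFactoryManagementClient

client = DataFactoryManagementClient(DefaultAzureCredential(), "<subscription-id>")
client.integration_runtimes.begin_stop(
    "exampleResourceGroup", "exampleFactoryName", "exampleManagedIr").result()
# -------------------------------------------------------------------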
- :type integration_runtime_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.sync_credentials.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - sync_credentials.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/syncCredentials'} # type: ignore - - def get_monitoring_data( - self, - resource_group_name, # type: str - factory_name, # type: str - integration_runtime_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> "models.IntegrationRuntimeMonitoringData" - """Get the integration runtime monitoring data, which includes the monitor data for all the nodes - under this integration runtime. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. 
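# ---- Editor's usage sketch (annotation, not part of the diff) ----
# sync_credentials above is a bodyless POST: it pushes the dispatcher node's
# credentials to all worker nodes and returns nothing on success. Names are
# placeholders.
from azure.identity import DefaultAzureCredential
from azure.mgmt.datafactory import DataFactoryManagementClient

client = DataFactoryManagementClient(DefaultAzureCredential(), "<subscription-id>")
client.integration_runtimes.sync_credentials(
    "exampleResourceGroup", "exampleFactoryName", "exampleSelfHostedIr")
# -------------------------------------------------------------------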
- :type integration_runtime_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: IntegrationRuntimeMonitoringData, or the result of cls(response) - :rtype: ~data_factory_management_client.models.IntegrationRuntimeMonitoringData - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeMonitoringData"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.get_monitoring_data.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('IntegrationRuntimeMonitoringData', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get_monitoring_data.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/monitoringData'} # type: ignore - - def upgrade( - self, - resource_group_name, # type: str - factory_name, # type: str - integration_runtime_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> None - """Upgrade self-hosted integration runtime to latest version if availability. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. 
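# ---- Editor's usage sketch (annotation, not part of the diff) ----
# A sketch of consuming get_monitoring_data above. The .nodes attribute and
# its fields are assumptions based on the IntegrationRuntimeMonitoringData /
# IntegrationRuntimeNodeMonitoringData models; names are placeholders.
from azure.identity import DefaultAzureCredential
from azure.mgmt.datafactory import DataFactoryManagementClient

client = DataFactoryManagementClient(DefaultAzureCredential(), "<subscription-id>")
data = client.integration_runtimes.get_monitoring_data(
    "exampleResourceGroup", "exampleFactoryName", "exampleSelfHostedIr")
for node in data.nodes or []:  # one monitoring record per node under this runtime
    print(node.node_name, node.cpu_utilization, node.available_memory_in_mb)
# -------------------------------------------------------------------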
- :type integration_runtime_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.upgrade.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - upgrade.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/upgrade'} # type: ignore - - def remove_link( - self, - resource_group_name, # type: str - factory_name, # type: str - integration_runtime_name, # type: str - linked_factory_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> None - """Remove all linked integration runtimes under specific data factory in a self-hosted integration - runtime. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. - :type integration_runtime_name: str - :param linked_factory_name: The data factory name for linked integration runtime. 
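# ---- Editor's usage sketch (annotation, not part of the diff) ----
# upgrade above returns no body; it only has an effect on a self-hosted
# integration runtime for which a newer version is available. Names are
# placeholders.
from azure.identity import DefaultAzureCredential
from azure.mgmt.datafactory import DataFactoryManagementClient

client = DataFactoryManagementClient(DefaultAzureCredential(), "<subscription-id>")
client.integration_runtimes.upgrade(
    "exampleResourceGroup", "exampleFactoryName", "exampleSelfHostedIr")
# -------------------------------------------------------------------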
- :type linked_factory_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - linked_integration_runtime_request = models.LinkedIntegrationRuntimeRequest(linked_factory_name=linked_factory_name) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.remove_link.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(linked_integration_runtime_request, 'LinkedIntegrationRuntimeRequest') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - remove_link.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/removeLinks'} # type: ignore - - def create_linked_integration_runtime( - self, - resource_group_name, # type: str - factory_name, # type: str - integration_runtime_name, # type: str - name=None, # type: Optional[str] - subscription_id=None, # type: Optional[str] - data_factory_name=None, # type: Optional[str] - data_factory_location=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> "models.IntegrationRuntimeStatusResponse" - """Create a linked integration runtime entry in a shared integration runtime. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. - :type integration_runtime_name: str - :param name: The name of the linked integration runtime. 
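# ---- Editor's usage sketch (annotation, not part of the diff) ----
# The vendored remove_link above flattens linked_factory_name into a
# LinkedIntegrationRuntimeRequest; the upstream azure.mgmt.datafactory
# surface kept by this diff (see the remove_links hunk later on) takes the
# request model directly. Names are placeholders.
from azure.identity import DefaultAzureCredential
from azure.mgmt.datafactory import DataFactoryManagementClient
from azure.mgmt.datafactory.models import LinkedIntegrationRuntimeRequest

client = DataFactoryManagementClient(DefaultAzureCredential(), "<subscription-id>")
client.integration_runtimes.remove_links(
    "exampleResourceGroup", "exampleFactoryName", "exampleSharedIr",
    LinkedIntegrationRuntimeRequest(linked_factory_name="exampleLinkedFactory"))
# -------------------------------------------------------------------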
- :type name: str - :param subscription_id: The ID of the subscription that the linked integration runtime belongs - to. - :type subscription_id: str - :param data_factory_name: The name of the data factory that the linked integration runtime - belongs to. - :type data_factory_name: str - :param data_factory_location: The location of the data factory that the linked integration - runtime belongs to. - :type data_factory_location: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: IntegrationRuntimeStatusResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.IntegrationRuntimeStatusResponse - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeStatusResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - create_linked_integration_runtime_request = models.CreateLinkedIntegrationRuntimeRequest(name=name, subscription_id=subscription_id, data_factory_name=data_factory_name, data_factory_location=data_factory_location) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.create_linked_integration_runtime.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(create_linked_integration_runtime_request, 'CreateLinkedIntegrationRuntimeRequest') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('IntegrationRuntimeStatusResponse', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - create_linked_integration_runtime.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/linkedIntegrationRuntime'} # type: ignore diff --git 
a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtimes_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtimes_operations.py index d0a57313403..b4521ba7818 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtimes_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtimes_operations.py @@ -32,7 +32,7 @@ class IntegrationRuntimesOperations(object): instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models + :type models: ~azure.mgmt.datafactory.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. @@ -62,7 +62,7 @@ def list_by_factory( :type factory_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either IntegrationRuntimeListResponse or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~data_factory_management_client.models.IntegrationRuntimeListResponse] + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.datafactory.models.IntegrationRuntimeListResponse] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeListResponse"] @@ -141,13 +141,13 @@ def create_or_update( :param integration_runtime_name: The integration runtime name. :type integration_runtime_name: str :param integration_runtime: Integration runtime resource definition. - :type integration_runtime: ~data_factory_management_client.models.IntegrationRuntimeResource + :type integration_runtime: ~azure.mgmt.datafactory.models.IntegrationRuntimeResource :param if_match: ETag of the integration runtime entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. :type if_match: str :keyword callable cls: A custom type or function that will be passed the direct response :return: IntegrationRuntimeResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.IntegrationRuntimeResource + :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeResource :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeResource"] @@ -222,7 +222,7 @@ def get( :type if_none_match: str :keyword callable cls: A custom type or function that will be passed the direct response :return: IntegrationRuntimeResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.IntegrationRuntimeResource or None + :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeResource or None :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.IntegrationRuntimeResource"]] @@ -289,10 +289,10 @@ def update( :param integration_runtime_name: The integration runtime name. :type integration_runtime_name: str :param update_integration_runtime_request: The parameters for updating an integration runtime. 
- :type update_integration_runtime_request: ~data_factory_management_client.models.UpdateIntegrationRuntimeRequest + :type update_integration_runtime_request: ~azure.mgmt.datafactory.models.UpdateIntegrationRuntimeRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: IntegrationRuntimeResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.IntegrationRuntimeResource + :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeResource :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeResource"] @@ -420,7 +420,7 @@ def get_status( :type integration_runtime_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: IntegrationRuntimeStatusResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.IntegrationRuntimeStatusResponse + :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeStatusResponse :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeStatusResponse"] @@ -465,6 +465,69 @@ def get_status( return deserialized get_status.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/getStatus'} # type: ignore + def list_outbound_network_dependencies_endpoints( + self, + resource_group_name, # type: str + factory_name, # type: str + integration_runtime_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> "models.IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse" + """Gets the list of outbound network dependencies for a given Azure-SSIS integration runtime. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. 
+ :type integration_runtime_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse, or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self.list_outbound_network_dependencies_endpoints.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + list_outbound_network_dependencies_endpoints.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/outboundNetworkDependenciesEndpoints'} # type: ignore + def get_connection_info( self, resource_group_name, # type: str @@ -484,7 +547,7 @@ def get_connection_info( :type integration_runtime_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: IntegrationRuntimeConnectionInfo, or the result of cls(response) - :rtype: ~data_factory_management_client.models.IntegrationRuntimeConnectionInfo + :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeConnectionInfo :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeConnectionInfo"] @@ -548,10 +611,10 @@ def regenerate_auth_key( :type integration_runtime_name: str :param regenerate_key_parameters: The parameters for regenerating integration runtime authentication key. 
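# ---- Editor's usage sketch (annotation, not part of the diff) ----
# Usage of the newly added list_outbound_network_dependencies_endpoints
# operation above. Grouping of the endpoints by category is an assumption
# based on the response model's shape; names are placeholders.
from azure.identity import DefaultAzureCredential
from azure.mgmt.datafactory import DataFactoryManagementClient

client = DataFactoryManagementClient(DefaultAzureCredential(), "<subscription-id>")
deps = client.integration_runtimes.list_outbound_network_dependencies_endpoints(
    "exampleResourceGroup", "exampleFactoryName", "exampleAzureSsisIr")
for category in deps.value or []:
    print(category.category)
# -------------------------------------------------------------------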
- :type regenerate_key_parameters: ~data_factory_management_client.models.IntegrationRuntimeRegenerateKeyParameters + :type regenerate_key_parameters: ~azure.mgmt.datafactory.models.IntegrationRuntimeRegenerateKeyParameters :keyword callable cls: A custom type or function that will be passed the direct response :return: IntegrationRuntimeAuthKeys, or the result of cls(response) - :rtype: ~data_factory_management_client.models.IntegrationRuntimeAuthKeys + :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeAuthKeys :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeAuthKeys"] @@ -619,7 +682,7 @@ def list_auth_keys( :type integration_runtime_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: IntegrationRuntimeAuthKeys, or the result of cls(response) - :rtype: ~data_factory_management_client.models.IntegrationRuntimeAuthKeys + :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeAuthKeys :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeAuthKeys"] @@ -739,7 +802,7 @@ def begin_start( :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of LROPoller that returns either IntegrationRuntimeStatusResponse or the result of cls(response) - :rtype: ~azure.core.polling.LROPoller[~data_factory_management_client.models.IntegrationRuntimeStatusResponse] + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.datafactory.models.IntegrationRuntimeStatusResponse] :raises ~azure.core.exceptions.HttpResponseError: """ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] @@ -988,7 +1051,7 @@ def get_monitoring_data( :type integration_runtime_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: IntegrationRuntimeMonitoringData, or the result of cls(response) - :rtype: ~data_factory_management_client.models.IntegrationRuntimeMonitoringData + :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeMonitoringData :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeMonitoringData"] @@ -1113,7 +1176,7 @@ def remove_links( :type integration_runtime_name: str :param linked_integration_runtime_request: The data factory name for the linked integration runtime. - :type linked_integration_runtime_request: ~data_factory_management_client.models.LinkedIntegrationRuntimeRequest + :type linked_integration_runtime_request: ~azure.mgmt.datafactory.models.LinkedIntegrationRuntimeRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: None, or the result of cls(response) :rtype: None @@ -1181,10 +1244,10 @@ def create_linked_integration_runtime( :param integration_runtime_name: The integration runtime name. :type integration_runtime_name: str :param create_linked_integration_runtime_request: The linked integration runtime properties. 
- :type create_linked_integration_runtime_request: ~data_factory_management_client.models.CreateLinkedIntegrationRuntimeRequest + :type create_linked_integration_runtime_request: ~azure.mgmt.datafactory.models.CreateLinkedIntegrationRuntimeRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: IntegrationRuntimeStatusResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.IntegrationRuntimeStatusResponse + :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeStatusResponse :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeStatusResponse"] diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_linked_service_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_linked_service_operations.py deleted file mode 100644 index 7124cb588eb..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_linked_service_operations.py +++ /dev/null @@ -1,320 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from typing import TYPE_CHECKING -import warnings - -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.paging import ItemPaged -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse -from azure.mgmt.core.exceptions import ARMErrorFormat - -from .. import models - -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar - - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] - -class LinkedServiceOperations(object): - """LinkedServiceOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = models - - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - def list_by_factory( - self, - resource_group_name, # type: str - factory_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> Iterable["models.LinkedServiceListResponse"] - """Lists linked services. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. 
- :type factory_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either LinkedServiceListResponse or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~data_factory_management_client.models.LinkedServiceListResponse] - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.LinkedServiceListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - if not next_link: - # Construct URL - url = self.list_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) - return request - - def extract_data(pipeline_response): - deserialized = self._deserialize('LinkedServiceListResponse', pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) - return deserialized.next_link or None, iter(list_of_elem) - - def get_next(next_link=None): - request = prepare_request(next_link) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return ItemPaged( - get_next, extract_data - ) - list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/linkedservices'} # type: ignore - - def create_or_update( - self, - resource_group_name, # type: str - factory_name, # type: str - linked_service_name, # type: str - properties, # type: "models.LinkedService" - if_match=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> "models.LinkedServiceResource" - """Creates or updates a linked service. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param linked_service_name: The linked service name. - :type linked_service_name: str - :param properties: Properties of linked service. - :type properties: ~data_factory_management_client.models.LinkedService - :param if_match: ETag of the linkedService entity. 
Should only be specified for update, for - which it should match existing entity or can be * for unconditional update. - :type if_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: LinkedServiceResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.LinkedServiceResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.LinkedServiceResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - linked_service = models.LinkedServiceResource(properties=properties) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.create_or_update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'linkedServiceName': self._serialize.url("linked_service_name", linked_service_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(linked_service, 'LinkedServiceResource') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('LinkedServiceResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/linkedservices/{linkedServiceName}'} # type: ignore - - def get( - self, - resource_group_name, # type: str - factory_name, # type: str - linked_service_name, # type: str - if_none_match=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> Optional["models.LinkedServiceResource"] - """Gets a linked service. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param linked_service_name: The linked service name. 
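# ---- Editor's usage sketch (annotation, not part of the diff) ----
# The deleted vendored create_or_update above wraps the caller's `properties`
# in LinkedServiceResource itself; with the upstream surface this diff keeps,
# a caller builds the resource explicitly. The storage connection string and
# all names are placeholders.
from azure.identity import DefaultAzureCredential
from azure.mgmt.datafactory import DataFactoryManagementClient
from azure.mgmt.datafactory.models import (AzureStorageLinkedService,
                                           LinkedServiceResource)

client = DataFactoryManagementClient(DefaultAzureCredential(), "<subscription-id>")
client.linked_services.create_or_update(
    "exampleResourceGroup", "exampleFactoryName", "exampleLinkedService",
    LinkedServiceResource(properties=AzureStorageLinkedService(
        connection_string="DefaultEndpointsProtocol=https;AccountName=<account>")))
# -------------------------------------------------------------------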
- :type linked_service_name: str - :param if_none_match: ETag of the linked service entity. Should only be specified for get. If - the ETag matches the existing entity tag, or if * was provided, then no content will be - returned. - :type if_none_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: LinkedServiceResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.LinkedServiceResource or None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.LinkedServiceResource"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'linkedServiceName': self._serialize.url("linked_service_name", linked_service_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 304]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize('LinkedServiceResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/linkedservices/{linkedServiceName}'} # type: ignore - - def delete( - self, - resource_group_name, # type: str - factory_name, # type: str - linked_service_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> None - """Deletes a linked service. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param linked_service_name: The linked service name. 
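# ---- Editor's usage sketch (annotation, not part of the diff) ----
# The conditional GET described above: when if_none_match equals the current
# ETag the service answers 304 and get() returns None. Reading .etag off the
# resource is an assumption about the model; names are placeholders.
from azure.identity import DefaultAzureCredential
from azure.mgmt.datafactory import DataFactoryManagementClient

client = DataFactoryManagementClient(DefaultAzureCredential(), "<subscription-id>")
current = client.linked_services.get(
    "exampleResourceGroup", "exampleFactoryName", "exampleLinkedService")
unchanged = client.linked_services.get(
    "exampleResourceGroup", "exampleFactoryName", "exampleLinkedService",
    if_none_match=current.etag)  # None if the entity has not changed since `current`
# -------------------------------------------------------------------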
- :type linked_service_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'linkedServiceName': self._serialize.url("linked_service_name", linked_service_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.delete(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 204]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/linkedservices/{linkedServiceName}'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_linked_services_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_linked_services_operations.py index ffb243da168..58d7f1f344d 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_linked_services_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_linked_services_operations.py @@ -30,7 +30,7 @@ class LinkedServicesOperations(object): instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models + :type models: ~azure.mgmt.datafactory.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. 
@@ -60,7 +60,7 @@ def list_by_factory( :type factory_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either LinkedServiceListResponse or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~data_factory_management_client.models.LinkedServiceListResponse] + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.datafactory.models.LinkedServiceListResponse] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.LinkedServiceListResponse"] @@ -139,13 +139,13 @@ def create_or_update( :param linked_service_name: The linked service name. :type linked_service_name: str :param linked_service: Linked service resource definition. - :type linked_service: ~data_factory_management_client.models.LinkedServiceResource + :type linked_service: ~azure.mgmt.datafactory.models.LinkedServiceResource :param if_match: ETag of the linkedService entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. :type if_match: str :keyword callable cls: A custom type or function that will be passed the direct response :return: LinkedServiceResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.LinkedServiceResource + :rtype: ~azure.mgmt.datafactory.models.LinkedServiceResource :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.LinkedServiceResource"] @@ -220,7 +220,7 @@ def get( :type if_none_match: str :keyword callable cls: A custom type or function that will be passed the direct response :return: LinkedServiceResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.LinkedServiceResource or None + :rtype: ~azure.mgmt.datafactory.models.LinkedServiceResource or None :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.LinkedServiceResource"]] diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_managed_private_endpoint_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_managed_private_endpoint_operations.py deleted file mode 100644 index 29be0bd0e6d..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_managed_private_endpoint_operations.py +++ /dev/null @@ -1,344 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from typing import TYPE_CHECKING -import warnings - -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.paging import ItemPaged -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse -from azure.mgmt.core.exceptions import ARMErrorFormat - -from .. 
import models - -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, Iterable, List, Optional, TypeVar - - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] - -class ManagedPrivateEndpointOperations(object): - """ManagedPrivateEndpointOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = models - - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - def list_by_factory( - self, - resource_group_name, # type: str - factory_name, # type: str - managed_virtual_network_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> Iterable["models.ManagedPrivateEndpointListResponse"] - """Lists managed private endpoints. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param managed_virtual_network_name: Managed virtual network name. - :type managed_virtual_network_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either ManagedPrivateEndpointListResponse or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~data_factory_management_client.models.ManagedPrivateEndpointListResponse] - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedPrivateEndpointListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - if not next_link: - # Construct URL - url = self.list_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - else: - url = next_link - query_parameters = {} # type: Dict[str, Any] 
- request = self._client.get(url, query_parameters, header_parameters) - return request - - def extract_data(pipeline_response): - deserialized = self._deserialize('ManagedPrivateEndpointListResponse', pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) - return deserialized.next_link or None, iter(list_of_elem) - - def get_next(next_link=None): - request = prepare_request(next_link) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return ItemPaged( - get_next, extract_data - ) - list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints'} # type: ignore - - def create_or_update( - self, - resource_group_name, # type: str - factory_name, # type: str - managed_virtual_network_name, # type: str - managed_private_endpoint_name, # type: str - if_match=None, # type: Optional[str] - connection_state=None, # type: Optional["models.ConnectionStateProperties"] - fqdns=None, # type: Optional[List[str]] - group_id=None, # type: Optional[str] - private_link_resource_id=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> "models.ManagedPrivateEndpointResource" - """Creates or updates a managed private endpoint. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param managed_virtual_network_name: Managed virtual network name. - :type managed_virtual_network_name: str - :param managed_private_endpoint_name: Managed private endpoint name. - :type managed_private_endpoint_name: str - :param if_match: ETag of the managed private endpoint entity. Should only be specified for - update, for which it should match existing entity or can be * for unconditional update. - :type if_match: str - :param connection_state: The managed private endpoint connection state. - :type connection_state: ~data_factory_management_client.models.ConnectionStateProperties - :param fqdns: Fully qualified domain names. - :type fqdns: list[str] - :param group_id: The groupId to which the managed private endpoint is created. - :type group_id: str - :param private_link_resource_id: The ARM resource ID of the resource to which the managed - private endpoint is created. 
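# ---- Editor's usage sketch (annotation, not part of the diff) ----
# Creating a managed private endpoint with the upstream-style surface that
# this diff migrates to (the vendored flavor above flattens these fields).
# The ManagedPrivateEndpoint properties model, the "default" managed virtual
# network name, and all ids are assumptions/placeholders.
from azure.identity import DefaultAzureCredential
from azure.mgmt.datafactory import DataFactoryManagementClient
from azure.mgmt.datafactory.models import (ManagedPrivateEndpoint,
                                           ManagedPrivateEndpointResource)

client = DataFactoryManagementClient(DefaultAzureCredential(), "<subscription-id>")
client.managed_private_endpoints.create_or_update(
    "exampleResourceGroup", "exampleFactoryName", "default", "exampleEndpoint",
    ManagedPrivateEndpointResource(properties=ManagedPrivateEndpoint(
        group_id="blob",
        private_link_resource_id=("/subscriptions/<sub>/resourceGroups/<rg>"
                                  "/providers/Microsoft.Storage/storageAccounts/<account>"))))
# -------------------------------------------------------------------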
- :type private_link_resource_id: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: ManagedPrivateEndpointResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.ManagedPrivateEndpointResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedPrivateEndpointResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - managed_private_endpoint = models.ManagedPrivateEndpointResource(connection_state=connection_state, fqdns=fqdns, group_id=group_id, private_link_resource_id=private_link_resource_id) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.create_or_update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), - 'managedPrivateEndpointName': self._serialize.url("managed_private_endpoint_name", managed_private_endpoint_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(managed_private_endpoint, 'ManagedPrivateEndpointResource') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('ManagedPrivateEndpointResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints/{managedPrivateEndpointName}'} # type: ignore - - def get( - self, - resource_group_name, # type: str - factory_name, # type: str - 
managed_virtual_network_name, # type: str - managed_private_endpoint_name, # type: str - if_none_match=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> "models.ManagedPrivateEndpointResource" - """Gets a managed private endpoint. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param managed_virtual_network_name: Managed virtual network name. - :type managed_virtual_network_name: str - :param managed_private_endpoint_name: Managed private endpoint name. - :type managed_private_endpoint_name: str - :param if_none_match: ETag of the managed private endpoint entity. Should only be specified for - get. If the ETag matches the existing entity tag, or if * was provided, then no content will be - returned. - :type if_none_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: ManagedPrivateEndpointResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.ManagedPrivateEndpointResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedPrivateEndpointResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), - 'managedPrivateEndpointName': self._serialize.url("managed_private_endpoint_name", managed_private_endpoint_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('ManagedPrivateEndpointResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints/{managedPrivateEndpointName}'} # type: ignore - - def delete( - self, - resource_group_name, # type: str - factory_name, # type: str - managed_virtual_network_name, # type: str - managed_private_endpoint_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> None - """Deletes a managed private endpoint. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param managed_virtual_network_name: Managed virtual network name. - :type managed_virtual_network_name: str - :param managed_private_endpoint_name: Managed private endpoint name. - :type managed_private_endpoint_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), - 'managedPrivateEndpointName': self._serialize.url("managed_private_endpoint_name", managed_private_endpoint_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.delete(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 204]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints/{managedPrivateEndpointName}'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_managed_private_endpoints_operations.py 
b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_managed_private_endpoints_operations.py index d1c7c89531f..a8340467285 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_managed_private_endpoints_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_managed_private_endpoints_operations.py @@ -30,7 +30,7 @@ class ManagedPrivateEndpointsOperations(object): instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models + :type models: ~azure.mgmt.datafactory.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. @@ -63,7 +63,7 @@ def list_by_factory( :type managed_virtual_network_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either ManagedPrivateEndpointListResponse or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~data_factory_management_client.models.ManagedPrivateEndpointListResponse] + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.datafactory.models.ManagedPrivateEndpointListResponse] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedPrivateEndpointListResponse"] @@ -146,13 +146,13 @@ def create_or_update( :param managed_private_endpoint_name: Managed private endpoint name. :type managed_private_endpoint_name: str :param managed_private_endpoint: Managed private endpoint resource definition. - :type managed_private_endpoint: ~data_factory_management_client.models.ManagedPrivateEndpointResource + :type managed_private_endpoint: ~azure.mgmt.datafactory.models.ManagedPrivateEndpointResource :param if_match: ETag of the managed private endpoint entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. 
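# Editor's sketch of the If-Match flow documented above: pass the ETag from a
# prior read so a concurrent writer fails fast instead of being silently
# overwritten; "*" forces an unconditional update. Resource names are
# illustrative assumptions.
existing = client.managed_private_endpoints.get(
    "exampleResourceGroup", "exampleFactoryName", "default", "exampleEndpoint")
client.managed_private_endpoints.create_or_update(
    "exampleResourceGroup", "exampleFactoryName", "default", "exampleEndpoint",
    managed_private_endpoint=existing,
    if_match=existing.etag)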
:type if_match: str :keyword callable cls: A custom type or function that will be passed the direct response :return: ManagedPrivateEndpointResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.ManagedPrivateEndpointResource + :rtype: ~azure.mgmt.datafactory.models.ManagedPrivateEndpointResource :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedPrivateEndpointResource"] @@ -231,7 +231,7 @@ def get( :type if_none_match: str :keyword callable cls: A custom type or function that will be passed the direct response :return: ManagedPrivateEndpointResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.ManagedPrivateEndpointResource + :rtype: ~azure.mgmt.datafactory.models.ManagedPrivateEndpointResource :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedPrivateEndpointResource"] diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_managed_virtual_network_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_managed_virtual_network_operations.py deleted file mode 100644 index fa043ca3e59..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_managed_virtual_network_operations.py +++ /dev/null @@ -1,262 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from typing import TYPE_CHECKING -import warnings - -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.paging import ItemPaged -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse -from azure.mgmt.core.exceptions import ARMErrorFormat - -from .. import models - -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar - - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] - -class ManagedVirtualNetworkOperations(object): - """ManagedVirtualNetworkOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = models - - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - def list_by_factory( - self, - resource_group_name, # type: str - factory_name, # type: str - **kwargs # type: Any - ): - # type: (...) 
-> Iterable["models.ManagedVirtualNetworkListResponse"] - """Lists managed Virtual Networks. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either ManagedVirtualNetworkListResponse or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~data_factory_management_client.models.ManagedVirtualNetworkListResponse] - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedVirtualNetworkListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - if not next_link: - # Construct URL - url = self.list_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) - return request - - def extract_data(pipeline_response): - deserialized = self._deserialize('ManagedVirtualNetworkListResponse', pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) - return deserialized.next_link or None, iter(list_of_elem) - - def get_next(next_link=None): - request = prepare_request(next_link) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return ItemPaged( - get_next, extract_data - ) - list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks'} # type: ignore - - def create_or_update( - self, - resource_group_name, # type: str - factory_name, # type: str - managed_virtual_network_name, # type: str - properties, # type: "models.ManagedVirtualNetwork" - if_match=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> "models.ManagedVirtualNetworkResource" - """Creates or updates a managed Virtual Network. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. 
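# Editor's sketch: list_by_factory returns azure.core.paging.ItemPaged, so
# plain iteration transparently follows next_link across pages; the
# `managed_virtual_networks` attribute follows the refreshed client factory.
for vnet in client.managed_virtual_networks.list_by_factory(
        "exampleResourceGroup", "exampleFactoryName"):
    print(vnet.name)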
- :type factory_name: str - :param managed_virtual_network_name: Managed virtual network name. - :type managed_virtual_network_name: str - :param properties: Managed Virtual Network properties. - :type properties: ~data_factory_management_client.models.ManagedVirtualNetwork - :param if_match: ETag of the managed Virtual Network entity. Should only be specified for - update, for which it should match existing entity or can be * for unconditional update. - :type if_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: ManagedVirtualNetworkResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.ManagedVirtualNetworkResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedVirtualNetworkResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - managed_virtual_network = models.ManagedVirtualNetworkResource(properties=properties) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.create_or_update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(managed_virtual_network, 'ManagedVirtualNetworkResource') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('ManagedVirtualNetworkResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}'} # type: ignore - - def get( - self, - resource_group_name, # 
type: str - factory_name, # type: str - managed_virtual_network_name, # type: str - if_none_match=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> "models.ManagedVirtualNetworkResource" - """Gets a managed Virtual Network. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param managed_virtual_network_name: Managed virtual network name. - :type managed_virtual_network_name: str - :param if_none_match: ETag of the managed Virtual Network entity. Should only be specified for - get. If the ETag matches the existing entity tag, or if * was provided, then no content will be - returned. - :type if_none_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: ManagedVirtualNetworkResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.ManagedVirtualNetworkResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedVirtualNetworkResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('ManagedVirtualNetworkResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_managed_virtual_networks_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_managed_virtual_networks_operations.py index 
8f81cdf0c80..b5c877cea9d 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_managed_virtual_networks_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_managed_virtual_networks_operations.py @@ -30,7 +30,7 @@ class ManagedVirtualNetworksOperations(object): instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models + :type models: ~azure.mgmt.datafactory.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. @@ -60,7 +60,7 @@ def list_by_factory( :type factory_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either ManagedVirtualNetworkListResponse or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~data_factory_management_client.models.ManagedVirtualNetworkListResponse] + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.datafactory.models.ManagedVirtualNetworkListResponse] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedVirtualNetworkListResponse"] @@ -139,13 +139,13 @@ def create_or_update( :param managed_virtual_network_name: Managed virtual network name. :type managed_virtual_network_name: str :param managed_virtual_network: Managed Virtual Network resource definition. - :type managed_virtual_network: ~data_factory_management_client.models.ManagedVirtualNetworkResource + :type managed_virtual_network: ~azure.mgmt.datafactory.models.ManagedVirtualNetworkResource :param if_match: ETag of the managed Virtual Network entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. :type if_match: str :keyword callable cls: A custom type or function that will be passed the direct response :return: ManagedVirtualNetworkResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.ManagedVirtualNetworkResource + :rtype: ~azure.mgmt.datafactory.models.ManagedVirtualNetworkResource :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedVirtualNetworkResource"] @@ -220,7 +220,7 @@ def get( :type if_none_match: str :keyword callable cls: A custom type or function that will be passed the direct response :return: ManagedVirtualNetworkResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.ManagedVirtualNetworkResource + :rtype: ~azure.mgmt.datafactory.models.ManagedVirtualNetworkResource :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedVirtualNetworkResource"] diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_operation_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_operation_operations.py deleted file mode 100644 index c5cf3d43f6d..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_operation_operations.py +++ /dev/null @@ -1,106 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. 
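# Editor's sketch for the refreshed upsert signature above, which accepts the
# full ManagedVirtualNetworkResource rather than a bare properties object; a
# factory's managed VNet is conventionally named "default". The models import
# path and the empty-properties payload are assumptions about this vendored
# SDK layout.
from azext_datafactory.vendored_sdks.datafactory import models
client.managed_virtual_networks.create_or_update(
    resource_group_name="exampleResourceGroup",
    factory_name="exampleFactoryName",
    managed_virtual_network_name="default",
    managed_virtual_network=models.ManagedVirtualNetworkResource(
        properties=models.ManagedVirtualNetwork()))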
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is regenerated.
-# --------------------------------------------------------------------------
-from typing import TYPE_CHECKING
-import warnings
-
-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
-from azure.core.paging import ItemPaged
-from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import HttpRequest, HttpResponse
-from azure.mgmt.core.exceptions import ARMErrorFormat
-
-from .. import models
-
-if TYPE_CHECKING:
-    # pylint: disable=unused-import,ungrouped-imports
-    from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
-
-    T = TypeVar('T')
-    ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
-
-class OperationOperations(object):
-    """OperationOperations operations.
-
-    You should not instantiate this class directly. Instead, you should create a Client instance that
-    instantiates it for you and attaches it as an attribute.
-
-    :ivar models: Alias to model classes used in this operation group.
-    :type models: ~data_factory_management_client.models
-    :param client: Client for service requests.
-    :param config: Configuration of service client.
-    :param serializer: An object model serializer.
-    :param deserializer: An object model deserializer.
-    """
-
-    models = models
-
-    def __init__(self, client, config, serializer, deserializer):
-        self._client = client
-        self._serialize = serializer
-        self._deserialize = deserializer
-        self._config = config
-
-    def list(
-        self,
-        **kwargs  # type: Any
-    ):
-        # type: (...) -> Iterable["models.OperationListResponse"]
-        """Lists the available Azure Data Factory API operations.
-
-        :keyword callable cls: A custom type or function that will be passed the direct response
-        :return: An iterator like instance of either OperationListResponse or the result of cls(response)
-        :rtype: ~azure.core.paging.ItemPaged[~data_factory_management_client.models.OperationListResponse]
-        :raises: ~azure.core.exceptions.HttpResponseError
-        """
-        cls = kwargs.pop('cls', None)  # type: ClsType["models.OperationListResponse"]
-        error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
-        error_map.update(kwargs.pop('error_map', {}))
-        api_version = "2018-06-01"
-
-        def prepare_request(next_link=None):
-            # Construct headers
-            header_parameters = {}  # type: Dict[str, Any]
-            header_parameters['Accept'] = 'application/json'
-
-            if not next_link:
-                # Construct URL
-                url = self.list.metadata['url']  # type: ignore
-                # Construct parameters
-                query_parameters = {}  # type: Dict[str, Any]
-                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
-                request = self._client.get(url, query_parameters, header_parameters)
-            else:
-                url = next_link
-                query_parameters = {}  # type: Dict[str, Any]
-                request = self._client.get(url, query_parameters, header_parameters)
-            return request
-
-        def extract_data(pipeline_response):
-            deserialized = self._deserialize('OperationListResponse', pipeline_response)
-            list_of_elem = deserialized.value
-            if cls:
-                list_of_elem = cls(list_of_elem)
-            return deserialized.next_link or None, iter(list_of_elem)
-
-        def get_next(next_link=None):
-            request = prepare_request(next_link)
-
-            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
-            response = pipeline_response.http_response
-
-            if response.status_code not in [200]:
-                map_error(status_code=response.status_code, response=response, error_map=error_map)
-                raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
-            return pipeline_response
-
-        return ItemPaged(
-            get_next, extract_data
-        )
-    list.metadata = {'url': '/providers/Microsoft.DataFactory/operations'}  # type: ignore
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_operations.py
index 9795a6e8c4e..567165c2570 100644
--- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_operations.py
+++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_operations.py
@@ -30,7 +30,7 @@ class Operations(object):
     instantiates it for you and attaches it as an attribute.
 
     :ivar models: Alias to model classes used in this operation group.
-    :type models: ~data_factory_management_client.models
+    :type models: ~azure.mgmt.datafactory.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
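# Editor's sketch: every list_* method in this patch builds ItemPaged from a
# (prepare_request/get_next, extract_data) pair, so callers can iterate items
# directly or walk raw server pages via by_page():
for page in client.operations.list().by_page():
    for operation in page:
        print(operation.name)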
@@ -54,7 +54,7 @@ def list( :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either OperationListResponse or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~data_factory_management_client.models.OperationListResponse] + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.datafactory.models.OperationListResponse] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.OperationListResponse"] diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_pipeline_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_pipeline_operations.py deleted file mode 100644 index d82f423f2cb..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_pipeline_operations.py +++ /dev/null @@ -1,414 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from typing import TYPE_CHECKING -import warnings - -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.paging import ItemPaged -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse -from azure.mgmt.core.exceptions import ARMErrorFormat - -from .. import models - -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar - - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] - -class PipelineOperations(object): - """PipelineOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = models - - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - def list_by_factory( - self, - resource_group_name, # type: str - factory_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> Iterable["models.PipelineListResponse"] - """Lists pipelines. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. 
- :type factory_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either PipelineListResponse or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~data_factory_management_client.models.PipelineListResponse] - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - if not next_link: - # Construct URL - url = self.list_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) - return request - - def extract_data(pipeline_response): - deserialized = self._deserialize('PipelineListResponse', pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) - return deserialized.next_link or None, iter(list_of_elem) - - def get_next(next_link=None): - request = prepare_request(next_link) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return ItemPaged( - get_next, extract_data - ) - list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines'} # type: ignore - - def create_or_update( - self, - resource_group_name, # type: str - factory_name, # type: str - pipeline_name, # type: str - pipeline, # type: "models.PipelineResource" - if_match=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> "models.PipelineResource" - """Creates or updates a pipeline. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param pipeline_name: The pipeline name. - :type pipeline_name: str - :param pipeline: Pipeline resource definition. - :type pipeline: ~data_factory_management_client.models.PipelineResource - :param if_match: ETag of the pipeline entity. Should only be specified for update, for which - it should match existing entity or can be * for unconditional update. 
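# Editor's sketch for the pipeline upsert above; an empty activities list is
# assumed to be the smallest valid payload, and the `pipeline` operation-group
# attribute follows the old vendored client being deleted here.
client.pipeline.create_or_update(
    "exampleResourceGroup", "exampleFactoryName", "examplePipeline",
    pipeline=models.PipelineResource(activities=[]))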
- :type if_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: PipelineResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.PipelineResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.create_or_update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'pipelineName': self._serialize.url("pipeline_name", pipeline_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(pipeline, 'PipelineResource') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('PipelineResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}'} # type: ignore - - def get( - self, - resource_group_name, # type: str - factory_name, # type: str - pipeline_name, # type: str - if_none_match=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> Optional["models.PipelineResource"] - """Gets a pipeline. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param pipeline_name: The pipeline name. - :type pipeline_name: str - :param if_none_match: ETag of the pipeline entity. Should only be specified for get. If the - ETag matches the existing entity tag, or if * was provided, then no content will be returned. 
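# Editor's sketch of the 304 path documented above: when If-None-Match still
# matches the service-side ETag, the method deserializes nothing and returns
# None, so the cached copy remains current.
cached = client.pipeline.get(
    "exampleResourceGroup", "exampleFactoryName", "examplePipeline")
fresh = client.pipeline.get(
    "exampleResourceGroup", "exampleFactoryName", "examplePipeline",
    if_none_match=cached.etag)
if fresh is None:
    fresh = cached  # 304: unchanged on the service side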
- :type if_none_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: PipelineResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.PipelineResource or None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.PipelineResource"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'pipelineName': self._serialize.url("pipeline_name", pipeline_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 304]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize('PipelineResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}'} # type: ignore - - def delete( - self, - resource_group_name, # type: str - factory_name, # type: str - pipeline_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> None - """Deletes a pipeline. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param pipeline_name: The pipeline name. 
- :type pipeline_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'pipelineName': self._serialize.url("pipeline_name", pipeline_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.delete(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 204]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}'} # type: ignore - - def create_run( - self, - resource_group_name, # type: str - factory_name, # type: str - pipeline_name, # type: str - reference_pipeline_run_id=None, # type: Optional[str] - is_recovery=None, # type: Optional[bool] - start_activity_name=None, # type: Optional[str] - start_from_failure=None, # type: Optional[bool] - parameters=None, # type: Optional[Dict[str, object]] - **kwargs # type: Any - ): - # type: (...) -> "models.CreateRunResponse" - """Creates a run of a pipeline. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param pipeline_name: The pipeline name. - :type pipeline_name: str - :param reference_pipeline_run_id: The pipeline run identifier. If run ID is specified the - parameters of the specified run will be used to create a new run. - :type reference_pipeline_run_id: str - :param is_recovery: Recovery mode flag. If recovery mode is set to true, the specified - referenced pipeline run and the new run will be grouped under the same groupId. - :type is_recovery: bool - :param start_activity_name: In recovery mode, the rerun will start from this activity. If not - specified, all activities will run. - :type start_activity_name: str - :param start_from_failure: In recovery mode, if set to true, the rerun will start from failed - activities. The property will be used only if startActivityName is not specified. 
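# Editor's sketch of run creation with the knobs described above; note that
# `parameters` is honored only when reference_pipeline_run_id is omitted.
run = client.pipeline.create_run(
    "exampleResourceGroup", "exampleFactoryName", "examplePipeline",
    parameters={"outputBlobName": "example.txt"})
print(run.run_id)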
- :type start_from_failure: bool - :param parameters: Parameters of the pipeline run. These parameters will be used only if the - runId is not specified. - :type parameters: dict[str, object] - :keyword callable cls: A custom type or function that will be passed the direct response - :return: CreateRunResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.CreateRunResponse - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.CreateRunResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.create_run.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'pipelineName': self._serialize.url("pipeline_name", pipeline_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - if reference_pipeline_run_id is not None: - query_parameters['referencePipelineRunId'] = self._serialize.query("reference_pipeline_run_id", reference_pipeline_run_id, 'str') - if is_recovery is not None: - query_parameters['isRecovery'] = self._serialize.query("is_recovery", is_recovery, 'bool') - if start_activity_name is not None: - query_parameters['startActivityName'] = self._serialize.query("start_activity_name", start_activity_name, 'str') - if start_from_failure is not None: - query_parameters['startFromFailure'] = self._serialize.query("start_from_failure", start_from_failure, 'bool') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - if parameters is not None: - body_content = self._serialize.body(parameters, '{object}') - else: - body_content = None - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('CreateRunResponse', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - create_run.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}/createRun'} # type: ignore diff --git 
a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_pipeline_run_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_pipeline_run_operations.py deleted file mode 100644 index 75634fde5ac..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_pipeline_run_operations.py +++ /dev/null @@ -1,250 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -import datetime -from typing import TYPE_CHECKING -import warnings - -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse -from azure.mgmt.core.exceptions import ARMErrorFormat - -from .. import models - -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar - - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] - -class PipelineRunOperations(object): - """PipelineRunOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = models - - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - def query_by_factory( - self, - resource_group_name, # type: str - factory_name, # type: str - last_updated_after, # type: datetime.datetime - last_updated_before, # type: datetime.datetime - continuation_token_parameter=None, # type: Optional[str] - filters=None, # type: Optional[List["models.RunQueryFilter"]] - order_by=None, # type: Optional[List["models.RunQueryOrderBy"]] - **kwargs # type: Any - ): - # type: (...) -> "models.PipelineRunsQueryResponse" - """Query pipeline runs in the factory based on input filter conditions. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param last_updated_after: The time at or after which the run event was updated in 'ISO 8601' - format. - :type last_updated_after: ~datetime.datetime - :param last_updated_before: The time at or before which the run event was updated in 'ISO 8601' - format. - :type last_updated_before: ~datetime.datetime - :param continuation_token_parameter: The continuation token for getting the next page of - results. Null for first page. - :type continuation_token_parameter: str - :param filters: List of filters. 
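import datetime
# Editor's sketch of a filtered run query against the flattened signature
# shown here; the operand/operator literals mirror the public SDK's
# RunQueryFilter and are an assumption for this vendored copy, as is the
# `pipeline_run` attribute name. Reuses the `models` import from the earlier
# sketch.
response = client.pipeline_run.query_by_factory(
    "exampleResourceGroup", "exampleFactoryName",
    last_updated_after=datetime.datetime(2021, 6, 1, tzinfo=datetime.timezone.utc),
    last_updated_before=datetime.datetime(2021, 6, 2, tzinfo=datetime.timezone.utc),
    filters=[models.RunQueryFilter(
        operand="PipelineName", operator="Equals", values=["examplePipeline"])])
for pipeline_run in response.value:
    print(pipeline_run.run_id, pipeline_run.status)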
- :type filters: list[~data_factory_management_client.models.RunQueryFilter] - :param order_by: List of OrderBy option. - :type order_by: list[~data_factory_management_client.models.RunQueryOrderBy] - :keyword callable cls: A custom type or function that will be passed the direct response - :return: PipelineRunsQueryResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.PipelineRunsQueryResponse - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineRunsQueryResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - filter_parameters = models.RunFilterParameters(continuation_token=continuation_token_parameter, last_updated_after=last_updated_after, last_updated_before=last_updated_before, filters=filters, order_by=order_by) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.query_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(filter_parameters, 'RunFilterParameters') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('PipelineRunsQueryResponse', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - query_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/queryPipelineRuns'} # type: ignore - - def get( - self, - resource_group_name, # type: str - factory_name, # type: str - run_id, # type: str - **kwargs # type: Any - ): - # type: (...) -> "models.PipelineRun" - """Get a pipeline run by its run ID. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param run_id: The pipeline run identifier. 
- :type run_id: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: PipelineRun, or the result of cls(response) - :rtype: ~data_factory_management_client.models.PipelineRun - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineRun"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'runId': self._serialize.url("run_id", run_id, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('PipelineRun', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelineruns/{runId}'} # type: ignore - - def cancel( - self, - resource_group_name, # type: str - factory_name, # type: str - run_id, # type: str - is_recursive=None, # type: Optional[bool] - **kwargs # type: Any - ): - # type: (...) -> None - """Cancel a pipeline run by its run ID. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param run_id: The pipeline run identifier. - :type run_id: str - :param is_recursive: If true, cancel all the Child pipelines that are triggered by the current - pipeline. 
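# A minimal usage sketch of the get/cancel operations in this deleted group,
# assuming `client` is an already-constructed service client that exposed the
# group as `client.pipeline_run` (the attribute name and all values below are
# placeholders/assumptions, not taken from this diff):
rg, factory = 'exampleResourceGroup', 'exampleFactoryName'
run = client.pipeline_run.get(rg, factory, run_id='2f7fdb90-5df1-4b8e-ac2f-064cfa58202b')
if run.status == 'InProgress':
    # is_recursive=True also cancels child pipelines triggered by this run.
    client.pipeline_run.cancel(rg, factory, run.run_id, is_recursive=True)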
- :type is_recursive: bool - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.cancel.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'runId': self._serialize.url("run_id", run_id, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - if is_recursive is not None: - query_parameters['isRecursive'] = self._serialize.query("is_recursive", is_recursive, 'bool') - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - cancel.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelineruns/{runId}/cancel'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_pipeline_runs_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_pipeline_runs_operations.py index be684c71f0a..d8142b7ad24 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_pipeline_runs_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_pipeline_runs_operations.py @@ -29,7 +29,7 @@ class PipelineRunsOperations(object): instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models + :type models: ~azure.mgmt.datafactory.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. @@ -59,10 +59,10 @@ def query_by_factory( :param factory_name: The factory name. :type factory_name: str :param filter_parameters: Parameters to filter the pipeline run. 
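# Unlike the singular operation group deleted above, which flattened the filter
# arguments into keyword parameters, this surviving group takes the
# RunFilterParameters model directly. A minimal sketch, assuming `client` is a
# constructed service client exposing the group as `client.pipeline_runs`:
import datetime
from azure.mgmt.datafactory.models import RunFilterParameters, RunQueryFilter

filter_parameters = RunFilterParameters(
    last_updated_after=datetime.datetime(2021, 6, 1),
    last_updated_before=datetime.datetime(2021, 6, 2),
    filters=[RunQueryFilter(operand='PipelineName', operator='Equals',
                            values=['examplePipeline'])],
)
query = client.pipeline_runs.query_by_factory(
    'exampleResourceGroup', 'exampleFactoryName', filter_parameters)
for run in query.value:
    # PipelineRunsQueryResponse.value holds the matching PipelineRun objects.
    print(run.run_id, run.status)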
- :type filter_parameters: ~data_factory_management_client.models.RunFilterParameters + :type filter_parameters: ~azure.mgmt.datafactory.models.RunFilterParameters :keyword callable cls: A custom type or function that will be passed the direct response :return: PipelineRunsQueryResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.PipelineRunsQueryResponse + :rtype: ~azure.mgmt.datafactory.models.PipelineRunsQueryResponse :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineRunsQueryResponse"] @@ -129,7 +129,7 @@ def get( :type run_id: str :keyword callable cls: A custom type or function that will be passed the direct response :return: PipelineRun, or the result of cls(response) - :rtype: ~data_factory_management_client.models.PipelineRun + :rtype: ~azure.mgmt.datafactory.models.PipelineRun :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineRun"] diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_pipelines_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_pipelines_operations.py index d4a5594d606..36b7bc04188 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_pipelines_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_pipelines_operations.py @@ -30,7 +30,7 @@ class PipelinesOperations(object): instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models + :type models: ~azure.mgmt.datafactory.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. @@ -60,7 +60,7 @@ def list_by_factory( :type factory_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either PipelineListResponse or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~data_factory_management_client.models.PipelineListResponse] + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.datafactory.models.PipelineListResponse] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineListResponse"] @@ -139,13 +139,13 @@ def create_or_update( :param pipeline_name: The pipeline name. :type pipeline_name: str :param pipeline: Pipeline resource definition. - :type pipeline: ~data_factory_management_client.models.PipelineResource + :type pipeline: ~azure.mgmt.datafactory.models.PipelineResource :param if_match: ETag of the pipeline entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. 
:type if_match: str :keyword callable cls: A custom type or function that will be passed the direct response :return: PipelineResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.PipelineResource + :rtype: ~azure.mgmt.datafactory.models.PipelineResource :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineResource"] @@ -219,7 +219,7 @@ def get( :type if_none_match: str :keyword callable cls: A custom type or function that will be passed the direct response :return: PipelineResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.PipelineResource or None + :rtype: ~azure.mgmt.datafactory.models.PipelineResource or None :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.PipelineResource"]] @@ -366,7 +366,7 @@ def create_run( :type parameters: dict[str, object] :keyword callable cls: A custom type or function that will be passed the direct response :return: CreateRunResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.CreateRunResponse + :rtype: ~azure.mgmt.datafactory.models.CreateRunResponse :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.CreateRunResponse"] diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_private_end_point_connections_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_private_end_point_connections_operations.py index 11471ac9d41..74cb51bb807 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_private_end_point_connections_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_private_end_point_connections_operations.py @@ -30,7 +30,7 @@ class PrivateEndPointConnectionsOperations(object): instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models + :type models: ~azure.mgmt.datafactory.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. 
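# The hunks in these modified vendored files all share one shape: they retarget
# docstring cross-references from the internal generated namespace
# `~data_factory_management_client.models` to the published
# `~azure.mgmt.datafactory.models` path, without touching runtime behavior.
# A minimal sketch of the documented import path now resolving (assuming the
# published azure-mgmt-datafactory package is the intended docs target):
from azure.mgmt.datafactory import models
_ = models.PipelineResource  # same generated classes, documented path fixed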
@@ -60,7 +60,7 @@ def list_by_factory( :type factory_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either PrivateEndpointConnectionListResponse or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~data_factory_management_client.models.PrivateEndpointConnectionListResponse] + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.datafactory.models.PrivateEndpointConnectionListResponse] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.PrivateEndpointConnectionListResponse"] diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_private_endpoint_connection_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_private_endpoint_connection_operations.py index 60bd6a37157..240258361e3 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_private_endpoint_connection_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_private_endpoint_connection_operations.py @@ -29,7 +29,7 @@ class PrivateEndpointConnectionOperations(object): instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models + :type models: ~azure.mgmt.datafactory.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. @@ -63,13 +63,13 @@ def create_or_update( :param private_endpoint_connection_name: The private endpoint connection name. :type private_endpoint_connection_name: str :param private_endpoint_wrapper: - :type private_endpoint_wrapper: ~data_factory_management_client.models.PrivateLinkConnectionApprovalRequestResource + :type private_endpoint_wrapper: ~azure.mgmt.datafactory.models.PrivateLinkConnectionApprovalRequestResource :param if_match: ETag of the private endpoint connection entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. 
:type if_match: str :keyword callable cls: A custom type or function that will be passed the direct response :return: PrivateEndpointConnectionResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.PrivateEndpointConnectionResource + :rtype: ~azure.mgmt.datafactory.models.PrivateEndpointConnectionResource :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.PrivateEndpointConnectionResource"] @@ -144,7 +144,7 @@ def get( :type if_none_match: str :keyword callable cls: A custom type or function that will be passed the direct response :return: PrivateEndpointConnectionResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.PrivateEndpointConnectionResource + :rtype: ~azure.mgmt.datafactory.models.PrivateEndpointConnectionResource :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.PrivateEndpointConnectionResource"] diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_private_link_resources_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_private_link_resources_operations.py index 89847585015..de519f218fa 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_private_link_resources_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_private_link_resources_operations.py @@ -29,7 +29,7 @@ class PrivateLinkResourcesOperations(object): instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models + :type models: ~azure.mgmt.datafactory.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. @@ -59,7 +59,7 @@ def get( :type factory_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: PrivateLinkResourcesWrapper, or the result of cls(response) - :rtype: ~data_factory_management_client.models.PrivateLinkResourcesWrapper + :rtype: ~azure.mgmt.datafactory.models.PrivateLinkResourcesWrapper :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.PrivateLinkResourcesWrapper"] diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_trigger_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_trigger_operations.py deleted file mode 100644 index 142f32f2c31..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_trigger_operations.py +++ /dev/null @@ -1,895 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
-# -------------------------------------------------------------------------- -from typing import TYPE_CHECKING -import warnings - -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.paging import ItemPaged -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse -from azure.core.polling import LROPoller, NoPolling, PollingMethod -from azure.mgmt.core.exceptions import ARMErrorFormat -from azure.mgmt.core.polling.arm_polling import ARMPolling - -from .. import models - -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union - - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] - -class TriggerOperations(object): - """TriggerOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = models - - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - def list_by_factory( - self, - resource_group_name, # type: str - factory_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> Iterable["models.TriggerListResponse"] - """Lists triggers. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. 
- :type factory_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either TriggerListResponse or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~data_factory_management_client.models.TriggerListResponse] - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - if not next_link: - # Construct URL - url = self.list_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) - return request - - def extract_data(pipeline_response): - deserialized = self._deserialize('TriggerListResponse', pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) - return deserialized.next_link or None, iter(list_of_elem) - - def get_next(next_link=None): - request = prepare_request(next_link) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return ItemPaged( - get_next, extract_data - ) - list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers'} # type: ignore - - def query_by_factory( - self, - resource_group_name, # type: str - factory_name, # type: str - continuation_token_parameter=None, # type: Optional[str] - parent_trigger_name=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> "models.TriggerQueryResponse" - """Query triggers. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param continuation_token_parameter: The continuation token for getting the next page of - results. Null for first page. - :type continuation_token_parameter: str - :param parent_trigger_name: The name of the parent TumblingWindowTrigger to get the child rerun - triggers. 
- :type parent_trigger_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: TriggerQueryResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.TriggerQueryResponse - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerQueryResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - filter_parameters = models.TriggerFilterParameters(continuation_token=continuation_token_parameter, parent_trigger_name=parent_trigger_name) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.query_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(filter_parameters, 'TriggerFilterParameters') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('TriggerQueryResponse', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - query_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/querytriggers'} # type: ignore - - def create_or_update( - self, - resource_group_name, # type: str - factory_name, # type: str - trigger_name, # type: str - properties, # type: "models.Trigger" - if_match=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> "models.TriggerResource" - """Creates or updates a trigger. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param trigger_name: The trigger name. - :type trigger_name: str - :param properties: Properties of the trigger. - :type properties: ~data_factory_management_client.models.Trigger - :param if_match: ETag of the trigger entity. Should only be specified for update, for which it - should match existing entity or can be * for unconditional update. 
- :type if_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: TriggerResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.TriggerResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - trigger = models.TriggerResource(properties=properties) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.create_or_update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(trigger, 'TriggerResource') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('TriggerResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}'} # type: ignore - - def get( - self, - resource_group_name, # type: str - factory_name, # type: str - trigger_name, # type: str - if_none_match=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> Optional["models.TriggerResource"] - """Gets a trigger. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param trigger_name: The trigger name. - :type trigger_name: str - :param if_none_match: ETag of the trigger entity. Should only be specified for get. If the ETag - matches the existing entity tag, or if * was provided, then no content will be returned. 
- :type if_none_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: TriggerResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.TriggerResource or None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.TriggerResource"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 304]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize('TriggerResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}'} # type: ignore - - def delete( - self, - resource_group_name, # type: str - factory_name, # type: str - trigger_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> None - """Deletes a trigger. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param trigger_name: The trigger name. 
- :type trigger_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.delete(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 204]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}'} # type: ignore - - def _subscribe_to_event_initial( - self, - resource_group_name, # type: str - factory_name, # type: str - trigger_name, # type: str - **kwargs # type: Any - ): - # type: (...) 
-> Optional["models.TriggerSubscriptionOperationStatus"] - cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.TriggerSubscriptionOperationStatus"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self._subscribe_to_event_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 202]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize('TriggerSubscriptionOperationStatus', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - _subscribe_to_event_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/subscribeToEvents'} # type: ignore - - def begin_subscribe_to_event( - self, - resource_group_name, # type: str - factory_name, # type: str - trigger_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> LROPoller["models.TriggerSubscriptionOperationStatus"] - """Subscribe event trigger to events. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param trigger_name: The trigger name. - :type trigger_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
- :return: An instance of LROPoller that returns either TriggerSubscriptionOperationStatus or the result of cls(response) - :rtype: ~azure.core.polling.LROPoller[~data_factory_management_client.models.TriggerSubscriptionOperationStatus] - :raises ~azure.core.exceptions.HttpResponseError: - """ - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerSubscriptionOperationStatus"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] - if cont_token is None: - raw_result = self._subscribe_to_event_initial( - resource_group_name=resource_group_name, - factory_name=factory_name, - trigger_name=trigger_name, - cls=lambda x,y,z: x, - **kwargs - ) - - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) - - def get_long_running_output(pipeline_response): - deserialized = self._deserialize('TriggerSubscriptionOperationStatus', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - return deserialized - - if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling - if cont_token: - return LROPoller.from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output - ) - else: - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_subscribe_to_event.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/subscribeToEvents'} # type: ignore - - def get_event_subscription_status( - self, - resource_group_name, # type: str - factory_name, # type: str - trigger_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> "models.TriggerSubscriptionOperationStatus" - """Get a trigger's event subscription status. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param trigger_name: The trigger name. 
- :type trigger_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: TriggerSubscriptionOperationStatus, or the result of cls(response) - :rtype: ~data_factory_management_client.models.TriggerSubscriptionOperationStatus - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerSubscriptionOperationStatus"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.get_event_subscription_status.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('TriggerSubscriptionOperationStatus', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get_event_subscription_status.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/getEventSubscriptionStatus'} # type: ignore - - def _unsubscribe_from_event_initial( - self, - resource_group_name, # type: str - factory_name, # type: str - trigger_name, # type: str - **kwargs # type: Any - ): - # type: (...) 
-> Optional["models.TriggerSubscriptionOperationStatus"] - cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.TriggerSubscriptionOperationStatus"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self._unsubscribe_from_event_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 202]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize('TriggerSubscriptionOperationStatus', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - _unsubscribe_from_event_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/unsubscribeFromEvents'} # type: ignore - - def begin_unsubscribe_from_event( - self, - resource_group_name, # type: str - factory_name, # type: str - trigger_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> LROPoller["models.TriggerSubscriptionOperationStatus"] - """Unsubscribe event trigger from events. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param trigger_name: The trigger name. - :type trigger_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
- :return: An instance of LROPoller that returns either TriggerSubscriptionOperationStatus or the result of cls(response) - :rtype: ~azure.core.polling.LROPoller[~data_factory_management_client.models.TriggerSubscriptionOperationStatus] - :raises ~azure.core.exceptions.HttpResponseError: - """ - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerSubscriptionOperationStatus"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] - if cont_token is None: - raw_result = self._unsubscribe_from_event_initial( - resource_group_name=resource_group_name, - factory_name=factory_name, - trigger_name=trigger_name, - cls=lambda x,y,z: x, - **kwargs - ) - - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) - - def get_long_running_output(pipeline_response): - deserialized = self._deserialize('TriggerSubscriptionOperationStatus', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - return deserialized - - if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling - if cont_token: - return LROPoller.from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output - ) - else: - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_unsubscribe_from_event.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/unsubscribeFromEvents'} # type: ignore - - def _start_initial( - self, - resource_group_name, # type: str - factory_name, # type: str - trigger_name, # type: str - **kwargs # type: Any - ): - # type: (...) 
-> None - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self._start_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - _start_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/start'} # type: ignore - - def begin_start( - self, - resource_group_name, # type: str - factory_name, # type: str - trigger_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> LROPoller[None] - """Starts a trigger. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param trigger_name: The trigger name. - :type trigger_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
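# This start/stop pair follows the same long-running pattern but with a None
# result: .result() simply blocks until the service finishes the transition.
# A minimal sketch (`client` and the names below are assumptions):
client.trigger.begin_start(
    'exampleResourceGroup', 'exampleFactoryName', 'exampleTrigger').result()
client.trigger.begin_stop(
    'exampleResourceGroup', 'exampleFactoryName', 'exampleTrigger').result()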
- :return: An instance of LROPoller that returns either None or the result of cls(response) - :rtype: ~azure.core.polling.LROPoller[None] - :raises ~azure.core.exceptions.HttpResponseError: - """ - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] - if cont_token is None: - raw_result = self._start_initial( - resource_group_name=resource_group_name, - factory_name=factory_name, - trigger_name=trigger_name, - cls=lambda x,y,z: x, - **kwargs - ) - - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) - - def get_long_running_output(pipeline_response): - if cls: - return cls(pipeline_response, None, {}) - - if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling - if cont_token: - return LROPoller.from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output - ) - else: - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_start.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/start'} # type: ignore - - def _stop_initial( - self, - resource_group_name, # type: str - factory_name, # type: str - trigger_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> None - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self._stop_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - _stop_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/stop'} # type: ignore - - def begin_stop( - 
self, - resource_group_name, # type: str - factory_name, # type: str - trigger_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> LROPoller[None] - """Stops a trigger. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param trigger_name: The trigger name. - :type trigger_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. - :return: An instance of LROPoller that returns either None or the result of cls(response) - :rtype: ~azure.core.polling.LROPoller[None] - :raises ~azure.core.exceptions.HttpResponseError: - """ - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] - if cont_token is None: - raw_result = self._stop_initial( - resource_group_name=resource_group_name, - factory_name=factory_name, - trigger_name=trigger_name, - cls=lambda x,y,z: x, - **kwargs - ) - - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) - - def get_long_running_output(pipeline_response): - if cls: - return cls(pipeline_response, None, {}) - - if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling - if cont_token: - return LROPoller.from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output - ) - else: - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_stop.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/stop'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_trigger_run_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_trigger_run_operations.py deleted file mode 100644 index 3290d8196ab..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_trigger_run_operations.py +++ /dev/null @@ -1,248 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
-# -------------------------------------------------------------------------- -import datetime -from typing import TYPE_CHECKING -import warnings - -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse -from azure.mgmt.core.exceptions import ARMErrorFormat - -from .. import models - -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar - - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] - -class TriggerRunOperations(object): - """TriggerRunOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = models - - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - def rerun( - self, - resource_group_name, # type: str - factory_name, # type: str - trigger_name, # type: str - run_id, # type: str - **kwargs # type: Any - ): - # type: (...) -> None - """Rerun single trigger instance by runId. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param trigger_name: The trigger name. - :type trigger_name: str - :param run_id: The pipeline run identifier. 
- :type run_id: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.rerun.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - 'runId': self._serialize.url("run_id", run_id, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - rerun.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/triggerRuns/{runId}/rerun'} # type: ignore - - def cancel( - self, - resource_group_name, # type: str - factory_name, # type: str - trigger_name, # type: str - run_id, # type: str - **kwargs # type: Any - ): - # type: (...) -> None - """Cancel a single trigger instance by runId. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param trigger_name: The trigger name. - :type trigger_name: str - :param run_id: The pipeline run identifier. 
- :type run_id: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.cancel.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - 'runId': self._serialize.url("run_id", run_id, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - cancel.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/triggerRuns/{runId}/cancel'} # type: ignore - - def query_by_factory( - self, - resource_group_name, # type: str - factory_name, # type: str - last_updated_after, # type: datetime.datetime - last_updated_before, # type: datetime.datetime - continuation_token_parameter=None, # type: Optional[str] - filters=None, # type: Optional[List["models.RunQueryFilter"]] - order_by=None, # type: Optional[List["models.RunQueryOrderBy"]] - **kwargs # type: Any - ): - # type: (...) -> "models.TriggerRunsQueryResponse" - """Query trigger runs. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param last_updated_after: The time at or after which the run event was updated in 'ISO 8601' - format. - :type last_updated_after: ~datetime.datetime - :param last_updated_before: The time at or before which the run event was updated in 'ISO 8601' - format. - :type last_updated_before: ~datetime.datetime - :param continuation_token_parameter: The continuation token for getting the next page of - results. Null for first page. - :type continuation_token_parameter: str - :param filters: List of filters. - :type filters: list[~data_factory_management_client.models.RunQueryFilter] - :param order_by: List of OrderBy option. 
- :type order_by: list[~data_factory_management_client.models.RunQueryOrderBy] - :keyword callable cls: A custom type or function that will be passed the direct response - :return: TriggerRunsQueryResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.TriggerRunsQueryResponse - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerRunsQueryResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - filter_parameters = models.RunFilterParameters(continuation_token=continuation_token_parameter, last_updated_after=last_updated_after, last_updated_before=last_updated_before, filters=filters, order_by=order_by) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.query_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(filter_parameters, 'RunFilterParameters') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('TriggerRunsQueryResponse', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - query_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/queryTriggerRuns'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_trigger_runs_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_trigger_runs_operations.py index ca2b12d4a29..0e3bb6b4abe 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_trigger_runs_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_trigger_runs_operations.py @@ -29,7 +29,7 @@ class TriggerRunsOperations(object): instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. 
- :type models: ~data_factory_management_client.models + :type models: ~azure.mgmt.datafactory.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. @@ -187,10 +187,10 @@ def query_by_factory( :param factory_name: The factory name. :type factory_name: str :param filter_parameters: Parameters to filter the pipeline run. - :type filter_parameters: ~data_factory_management_client.models.RunFilterParameters + :type filter_parameters: ~azure.mgmt.datafactory.models.RunFilterParameters :keyword callable cls: A custom type or function that will be passed the direct response :return: TriggerRunsQueryResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.TriggerRunsQueryResponse + :rtype: ~azure.mgmt.datafactory.models.TriggerRunsQueryResponse :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerRunsQueryResponse"] diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_triggers_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_triggers_operations.py index f85d33b9c68..a4506ab40d9 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_triggers_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_triggers_operations.py @@ -32,7 +32,7 @@ class TriggersOperations(object): instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models + :type models: ~azure.mgmt.datafactory.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. @@ -62,7 +62,7 @@ def list_by_factory( :type factory_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either TriggerListResponse or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~data_factory_management_client.models.TriggerListResponse] + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.datafactory.models.TriggerListResponse] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerListResponse"] @@ -137,10 +137,10 @@ def query_by_factory( :param factory_name: The factory name. :type factory_name: str :param filter_parameters: Parameters to filter the triggers. - :type filter_parameters: ~data_factory_management_client.models.TriggerFilterParameters + :type filter_parameters: ~azure.mgmt.datafactory.models.TriggerFilterParameters :keyword callable cls: A custom type or function that will be passed the direct response :return: TriggerQueryResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.TriggerQueryResponse + :rtype: ~azure.mgmt.datafactory.models.TriggerQueryResponse :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerQueryResponse"] @@ -208,13 +208,13 @@ def create_or_update( :param trigger_name: The trigger name. :type trigger_name: str :param trigger: Trigger resource definition. - :type trigger: ~data_factory_management_client.models.TriggerResource + :type trigger: ~azure.mgmt.datafactory.models.TriggerResource :param if_match: ETag of the trigger entity. 
Should only be specified for update, for which it should match existing entity or can be * for unconditional update. :type if_match: str :keyword callable cls: A custom type or function that will be passed the direct response :return: TriggerResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.TriggerResource + :rtype: ~azure.mgmt.datafactory.models.TriggerResource :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerResource"] @@ -288,7 +288,7 @@ def get( :type if_none_match: str :keyword callable cls: A custom type or function that will be passed the direct response :return: TriggerResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.TriggerResource or None + :rtype: ~azure.mgmt.datafactory.models.TriggerResource or None :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.TriggerResource"]] @@ -472,7 +472,7 @@ def begin_subscribe_to_events( :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of LROPoller that returns either TriggerSubscriptionOperationStatus or the result of cls(response) - :rtype: ~azure.core.polling.LROPoller[~data_factory_management_client.models.TriggerSubscriptionOperationStatus] + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.datafactory.models.TriggerSubscriptionOperationStatus] :raises ~azure.core.exceptions.HttpResponseError: """ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] @@ -540,7 +540,7 @@ def get_event_subscription_status( :type trigger_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: TriggerSubscriptionOperationStatus, or the result of cls(response) - :rtype: ~data_factory_management_client.models.TriggerSubscriptionOperationStatus + :rtype: ~azure.mgmt.datafactory.models.TriggerSubscriptionOperationStatus :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerSubscriptionOperationStatus"] @@ -660,7 +660,7 @@ def begin_unsubscribe_from_events( :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of LROPoller that returns either TriggerSubscriptionOperationStatus or the result of cls(response) - :rtype: ~azure.core.polling.LROPoller[~data_factory_management_client.models.TriggerSubscriptionOperationStatus] + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.datafactory.models.TriggerSubscriptionOperationStatus] :raises ~azure.core.exceptions.HttpResponseError: """ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/setup.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/setup.py new file mode 100644 index 00000000000..489e9c4c502 --- /dev/null +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/setup.py @@ -0,0 +1,37 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
+# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +# coding: utf-8 + +from setuptools import setup, find_packages + +NAME = "azure-mgmt-datafactory" +VERSION = "1.0.0" + +# To install the library, run the following +# +# python setup.py install +# +# prerequisite: setuptools +# http://pypi.python.org/pypi/setuptools + +REQUIRES = ["msrest>=0.6.18", "azure-core<2.0.0,>=1.8.2", "azure-mgmt-core<2.0.0,>=1.2.1"] + +setup( + name=NAME, + version=VERSION, + description="azure-mgmt-datafactory", + author_email="", + url="", + keywords=["Swagger", "DataFactoryManagementClient"], + install_requires=REQUIRES, + packages=find_packages(), + include_package_data=True, + long_description="""\ + The Azure Data Factory V2 management API provides a RESTful set of web services that interact with Azure Data Factory V2 services. + """ +) diff --git a/src/datafactory/gen.zip b/src/datafactory/gen.zip deleted file mode 100644 index 296cd2dfd07..00000000000 Binary files a/src/datafactory/gen.zip and /dev/null differ diff --git a/src/datafactory/report.md b/src/datafactory/report.md index 1d9bdfb4cf1..f7cac3328fb 100644 --- a/src/datafactory/report.md +++ b/src/datafactory/report.md @@ -10,13 +10,15 @@ |CLI Command Group|Group Swagger name|Commands| |---------|------------|--------| |az datafactory|Factories|[commands](#CommandsInFactories)| +|az datafactory activity-run|ActivityRuns|[commands](#CommandsInActivityRuns)| +|az datafactory dataset|Datasets|[commands](#CommandsInDatasets)| |az datafactory integration-runtime|IntegrationRuntimes|[commands](#CommandsInIntegrationRuntimes)| |az datafactory integration-runtime-node|IntegrationRuntimeNodes|[commands](#CommandsInIntegrationRuntimeNodes)| |az datafactory linked-service|LinkedServices|[commands](#CommandsInLinkedServices)| -|az datafactory dataset|Datasets|[commands](#CommandsInDatasets)| +|az datafactory managed-private-endpoint|ManagedPrivateEndpoints|[commands](#CommandsInManagedPrivateEndpoints)| +|az datafactory managed-virtual-network|ManagedVirtualNetworks|[commands](#CommandsInManagedVirtualNetworks)| |az datafactory pipeline|Pipelines|[commands](#CommandsInPipelines)| |az datafactory pipeline-run|PipelineRuns|[commands](#CommandsInPipelineRuns)| -|az datafactory activity-run|ActivityRuns|[commands](#CommandsInActivityRuns)| |az datafactory trigger|Triggers|[commands](#CommandsInTriggers)| |az datafactory trigger-run|TriggerRuns|[commands](#CommandsInTriggerRuns)| @@ -45,7 +47,7 @@ |[az datafactory dataset list](#DatasetsListByFactory)|ListByFactory|[Parameters](#ParametersDatasetsListByFactory)|[Example](#ExamplesDatasetsListByFactory)| |[az datafactory dataset show](#DatasetsGet)|Get|[Parameters](#ParametersDatasetsGet)|[Example](#ExamplesDatasetsGet)| |[az datafactory dataset create](#DatasetsCreateOrUpdate#Create)|CreateOrUpdate#Create|[Parameters](#ParametersDatasetsCreateOrUpdate#Create)|[Example](#ExamplesDatasetsCreateOrUpdate#Create)| -|[az datafactory dataset update](#DatasetsCreateOrUpdate#Update)|CreateOrUpdate#Update|[Parameters](#ParametersDatasetsCreateOrUpdate#Update)|[Example](#ExamplesDatasetsCreateOrUpdate#Update)| +|[az datafactory dataset update](#DatasetsCreateOrUpdate#Update)|CreateOrUpdate#Update|[Parameters](#ParametersDatasetsCreateOrUpdate#Update)|Not Found| |[az datafactory dataset 
delete](#DatasetsDelete)|Delete|[Parameters](#ParametersDatasetsDelete)|[Example](#ExamplesDatasetsDelete)| ### Commands in `az datafactory integration-runtime` group @@ -83,9 +85,26 @@ |[az datafactory linked-service list](#LinkedServicesListByFactory)|ListByFactory|[Parameters](#ParametersLinkedServicesListByFactory)|[Example](#ExamplesLinkedServicesListByFactory)| |[az datafactory linked-service show](#LinkedServicesGet)|Get|[Parameters](#ParametersLinkedServicesGet)|[Example](#ExamplesLinkedServicesGet)| |[az datafactory linked-service create](#LinkedServicesCreateOrUpdate#Create)|CreateOrUpdate#Create|[Parameters](#ParametersLinkedServicesCreateOrUpdate#Create)|[Example](#ExamplesLinkedServicesCreateOrUpdate#Create)| -|[az datafactory linked-service update](#LinkedServicesCreateOrUpdate#Update)|CreateOrUpdate#Update|[Parameters](#ParametersLinkedServicesCreateOrUpdate#Update)|[Example](#ExamplesLinkedServicesCreateOrUpdate#Update)| +|[az datafactory linked-service update](#LinkedServicesCreateOrUpdate#Update)|CreateOrUpdate#Update|[Parameters](#ParametersLinkedServicesCreateOrUpdate#Update)|Not Found| |[az datafactory linked-service delete](#LinkedServicesDelete)|Delete|[Parameters](#ParametersLinkedServicesDelete)|[Example](#ExamplesLinkedServicesDelete)| +### Commands in `az datafactory managed-private-endpoint` group +|CLI Command|Operation Swagger name|Parameters|Examples| +|---------|------------|--------|-----------| +|[az datafactory managed-private-endpoint list](#ManagedPrivateEndpointsListByFactory)|ListByFactory|[Parameters](#ParametersManagedPrivateEndpointsListByFactory)|[Example](#ExamplesManagedPrivateEndpointsListByFactory)| +|[az datafactory managed-private-endpoint show](#ManagedPrivateEndpointsGet)|Get|[Parameters](#ParametersManagedPrivateEndpointsGet)|[Example](#ExamplesManagedPrivateEndpointsGet)| +|[az datafactory managed-private-endpoint create](#ManagedPrivateEndpointsCreateOrUpdate#Create)|CreateOrUpdate#Create|[Parameters](#ParametersManagedPrivateEndpointsCreateOrUpdate#Create)|[Example](#ExamplesManagedPrivateEndpointsCreateOrUpdate#Create)| +|[az datafactory managed-private-endpoint update](#ManagedPrivateEndpointsCreateOrUpdate#Update)|CreateOrUpdate#Update|[Parameters](#ParametersManagedPrivateEndpointsCreateOrUpdate#Update)|Not Found| +|[az datafactory managed-private-endpoint delete](#ManagedPrivateEndpointsDelete)|Delete|[Parameters](#ParametersManagedPrivateEndpointsDelete)|[Example](#ExamplesManagedPrivateEndpointsDelete)| + +### Commands in `az datafactory managed-virtual-network` group +|CLI Command|Operation Swagger name|Parameters|Examples| +|---------|------------|--------|-----------| +|[az datafactory managed-virtual-network list](#ManagedVirtualNetworksListByFactory)|ListByFactory|[Parameters](#ParametersManagedVirtualNetworksListByFactory)|[Example](#ExamplesManagedVirtualNetworksListByFactory)| +|[az datafactory managed-virtual-network show](#ManagedVirtualNetworksGet)|Get|[Parameters](#ParametersManagedVirtualNetworksGet)|[Example](#ExamplesManagedVirtualNetworksGet)| +|[az datafactory managed-virtual-network create](#ManagedVirtualNetworksCreateOrUpdate#Create)|CreateOrUpdate#Create|[Parameters](#ParametersManagedVirtualNetworksCreateOrUpdate#Create)|[Example](#ExamplesManagedVirtualNetworksCreateOrUpdate#Create)| +|[az datafactory managed-virtual-network update](#ManagedVirtualNetworksCreateOrUpdate#Update)|CreateOrUpdate#Update|[Parameters](#ParametersManagedVirtualNetworksCreateOrUpdate#Update)|Not Found| + ### Commands in `az 
datafactory pipeline` group |CLI Command|Operation Swagger name|Parameters|Examples| |---------|------------|--------|-----------| @@ -109,7 +128,7 @@ |[az datafactory trigger list](#TriggersListByFactory)|ListByFactory|[Parameters](#ParametersTriggersListByFactory)|[Example](#ExamplesTriggersListByFactory)| |[az datafactory trigger show](#TriggersGet)|Get|[Parameters](#ParametersTriggersGet)|[Example](#ExamplesTriggersGet)| |[az datafactory trigger create](#TriggersCreateOrUpdate#Create)|CreateOrUpdate#Create|[Parameters](#ParametersTriggersCreateOrUpdate#Create)|[Example](#ExamplesTriggersCreateOrUpdate#Create)| -|[az datafactory trigger update](#TriggersCreateOrUpdate#Update)|CreateOrUpdate#Update|[Parameters](#ParametersTriggersCreateOrUpdate#Update)|[Example](#ExamplesTriggersCreateOrUpdate#Update)| +|[az datafactory trigger update](#TriggersCreateOrUpdate#Update)|CreateOrUpdate#Update|[Parameters](#ParametersTriggersCreateOrUpdate#Update)|Not Found| |[az datafactory trigger delete](#TriggersDelete)|Delete|[Parameters](#ParametersTriggersDelete)|[Example](#ExamplesTriggersDelete)| |[az datafactory trigger get-event-subscription-status](#TriggersGetEventSubscriptionStatus)|GetEventSubscriptionStatus|[Parameters](#ParametersTriggersGetEventSubscriptionStatus)|[Example](#ExamplesTriggersGetEventSubscriptionStatus)| |[az datafactory trigger query-by-factory](#TriggersQueryByFactory)|QueryByFactory|[Parameters](#ParametersTriggersQueryByFactory)|[Example](#ExamplesTriggersQueryByFactory)| @@ -127,7 +146,6 @@ ## COMMAND DETAILS - ### group `az datafactory` #### Command `az datafactory list` @@ -149,6 +167,7 @@ az datafactory list ##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| + #### Command `az datafactory show` ##### Example @@ -324,26 +343,20 @@ pression\\",\\"value\\":\\"@dataset().MyFolderPath\\"}}}" --name "exampleDataset |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--dataset-name**|string|The dataset name.|dataset_name|datasetName| -|**--properties**|object|Dataset properties.|properties|properties| |**--if-match**|string|ETag of the dataset entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match|If-Match| +|**--properties**|object|Dataset properties.|properties|properties| #### Command `az datafactory dataset update` -##### Example -``` -az datafactory dataset update --description "Example description" --linked-service-name "{\\"type\\":\\"LinkedServiceRe\ -ference\\",\\"referenceName\\":\\"exampleLinkedService\\"}" --parameters "{\\"MyFileName\\":{\\"type\\":\\"String\\"},\ -\\"MyFolderPath\\":{\\"type\\":\\"String\\"}}" --name "exampleDataset" --factory-name "exampleFactoryName" \ ---resource-group "exampleResourceGroup" -``` + ##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--dataset-name**|string|The dataset name.|dataset_name|datasetName| -|**--linked-service-name**|object|Linked service reference.|linked_service_name|linkedServiceName| |**--if-match**|string|ETag of the dataset entity. 
Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match|If-Match| +|**--linked-service-name**|object|Linked service reference.|linked_service_name|linkedServiceName| |**--description**|string|Dataset description.|description|description| |**--structure**|any|Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement.|structure|structure| |**--schema**|any|Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement.|schema|schema| @@ -415,6 +428,7 @@ az datafactory integration-runtime linked-integration-runtime create --name "bfa #### Command `az datafactory integration-runtime managed create` + ##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| @@ -720,16 +734,12 @@ ps;AccountName=examplestorageaccount;AccountKey=\\"}}}" --name "exa |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--linked-service-name**|string|The linked service name.|linked_service_name|linkedServiceName| -|**--properties**|object|Properties of linked service.|properties|properties| |**--if-match**|string|ETag of the linkedService entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match|If-Match| +|**--properties**|object|Properties of linked service.|properties|properties| #### Command `az datafactory linked-service update` -##### Example -``` -az datafactory linked-service update --factory-name "exampleFactoryName" --description "Example description" --name \ -"exampleLinkedService" --resource-group "exampleResourceGroup" -``` + ##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| @@ -756,6 +766,144 @@ az datafactory linked-service delete --factory-name "exampleFactoryName" --name |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--linked-service-name**|string|The linked service name.|linked_service_name|linkedServiceName| +### group `az datafactory managed-private-endpoint` +#### Command `az datafactory managed-private-endpoint list` + +##### Example +``` +az datafactory managed-private-endpoint list --factory-name "exampleFactoryName" --managed-virtual-network-name \ +"exampleManagedVirtualNetworkName" --resource-group "exampleResourceGroup" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| +|**--factory-name**|string|The factory name.|factory_name|factoryName| +|**--managed-virtual-network-name**|string|Managed virtual network name|managed_virtual_network_name|managedVirtualNetworkName| + +#### Command `az datafactory managed-private-endpoint show` + +##### Example +``` +az datafactory managed-private-endpoint show --factory-name "exampleFactoryName" --name "exampleManagedPrivateEndpointN\ +ame" --managed-virtual-network-name "exampleManagedVirtualNetworkName" --resource-group "exampleResourceGroup" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The resource group 
name.|resource_group_name|resourceGroupName|
+|**--factory-name**|string|The factory name.|factory_name|factoryName|
+|**--managed-virtual-network-name**|string|Managed virtual network name|managed_virtual_network_name|managedVirtualNetworkName|
+|**--managed-private-endpoint-name**|string|Managed private endpoint name|managed_private_endpoint_name|managedPrivateEndpointName|
+|**--if-none-match**|string|ETag of the managed private endpoint entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned.|if_none_match|If-None-Match|
+
+#### Command `az datafactory managed-private-endpoint create`
+
+##### Example
+```
+az datafactory managed-private-endpoint create --factory-name "exampleFactoryName" --group-id "blob" \
+--private-link-resource-id "/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/prov\
+iders/Microsoft.Storage/storageAccounts/exampleBlobStorage" --name "exampleManagedPrivateEndpointName" \
+--managed-virtual-network-name "exampleManagedVirtualNetworkName" --resource-group "exampleResourceGroup"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName|
+|**--factory-name**|string|The factory name.|factory_name|factoryName|
+|**--managed-virtual-network-name**|string|Managed virtual network name|managed_virtual_network_name|managedVirtualNetworkName|
+|**--managed-private-endpoint-name**|string|Managed private endpoint name|managed_private_endpoint_name|managedPrivateEndpointName|
+|**--if-match**|string|ETag of the managed private endpoint entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match|If-Match|
+|**--fqdns**|array|Fully qualified domain names|fqdns|fqdns|
+|**--group-id**|string|The groupId to which the managed private endpoint is created|group_id|groupId|
+|**--private-link-resource-id**|string|The ARM resource ID of the resource to which the managed private endpoint is created|private_link_resource_id|privateLinkResourceId|
+
+#### Command `az datafactory managed-private-endpoint update`
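+
+##### Example
+The report does not include a generated example for this command; the invocation below is only an illustrative sketch, patterned on the create example above, and the --fqdns value shown is hypothetical:
+```
+az datafactory managed-private-endpoint update --factory-name "exampleFactoryName" --name \
+"exampleManagedPrivateEndpointName" --managed-virtual-network-name "exampleManagedVirtualNetworkName" \
+--resource-group "exampleResourceGroup" --fqdns "exampleBlobStorage.blob.core.windows.net"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName|
+|**--factory-name**|string|The factory name.|factory_name|factoryName|
+|**--managed-virtual-network-name**|string|Managed virtual network name|managed_virtual_network_name|managedVirtualNetworkName|
+|**--managed-private-endpoint-name**|string|Managed private endpoint name|managed_private_endpoint_name|managedPrivateEndpointName|
+|**--if-match**|string|ETag of the managed private endpoint entity.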
Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match|If-Match| +|**--fqdns**|array|Fully qualified domain names|fqdns|fqdns| +|**--group-id**|string|The groupId to which the managed private endpoint is created|group_id|groupId| +|**--private-link-resource-id**|string|The ARM resource ID of the resource to which the managed private endpoint is created|private_link_resource_id|privateLinkResourceId| + +#### Command `az datafactory managed-private-endpoint delete` + +##### Example +``` +az datafactory managed-private-endpoint delete --factory-name "exampleFactoryName" --name \ +"exampleManagedPrivateEndpointName" --managed-virtual-network-name "exampleManagedVirtualNetworkName" --resource-group \ +"exampleResourceGroup" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| +|**--factory-name**|string|The factory name.|factory_name|factoryName| +|**--managed-virtual-network-name**|string|Managed virtual network name|managed_virtual_network_name|managedVirtualNetworkName| +|**--managed-private-endpoint-name**|string|Managed private endpoint name|managed_private_endpoint_name|managedPrivateEndpointName| + +### group `az datafactory managed-virtual-network` +#### Command `az datafactory managed-virtual-network list` + +##### Example +``` +az datafactory managed-virtual-network list --factory-name "exampleFactoryName" --resource-group \ +"exampleResourceGroup" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| +|**--factory-name**|string|The factory name.|factory_name|factoryName| + +#### Command `az datafactory managed-virtual-network show` + +##### Example +``` +az datafactory managed-virtual-network show --factory-name "exampleFactoryName" --name "exampleManagedVirtualNetworkNam\ +e" --resource-group "exampleResourceGroup" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| +|**--factory-name**|string|The factory name.|factory_name|factoryName| +|**--managed-virtual-network-name**|string|Managed virtual network name|managed_virtual_network_name|managedVirtualNetworkName| +|**--if-none-match**|string|ETag of the managed Virtual Network entity. Should only be specified for get. 
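If the ETag matches the existing entity tag, or if * was provided, then no content will be returned.|if_none_match|If-None-Match|
+
+#### Command `az datafactory managed-virtual-network create`
+
+##### Example
+```
+az datafactory managed-virtual-network create --factory-name "exampleFactoryName" --name \
+"exampleManagedVirtualNetworkName" --resource-group "exampleResourceGroup"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName|
+|**--factory-name**|string|The factory name.|factory_name|factoryName|
+|**--managed-virtual-network-name**|string|Managed virtual network name|managed_virtual_network_name|managedVirtualNetworkName|
+|**--if-match**|string|ETag of the managed Virtual Network entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match|If-Match|
+
+#### Command `az datafactory managed-virtual-network update`
+
+##### Example
+The report does not include a generated example for this command; the invocation below is only an illustrative sketch, reusing the hypothetical names from the create example and passing --if-match "*" for an unconditional update:
+```
+az datafactory managed-virtual-network update --factory-name "exampleFactoryName" --name \
+"exampleManagedVirtualNetworkName" --resource-group "exampleResourceGroup" --if-match "*"
+```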
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName|
+|**--factory-name**|string|The factory name.|factory_name|factoryName|
+|**--managed-virtual-network-name**|string|Managed virtual network name|managed_virtual_network_name|managedVirtualNetworkName|
+|**--if-match**|string|ETag of the managed Virtual Network entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match|If-Match|
+
 ### group `az datafactory pipeline`
 #### Command `az datafactory pipeline list`
@@ -807,8 +955,8 @@ es\\":{\\"TestVariableArray\\":{\\"type\\":\\"Array\\"}},\\"runDimensions\\":{\\
 |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName|
 |**--factory-name**|string|The factory name.|factory_name|factoryName|
 |**--pipeline-name**|string|The pipeline name.|pipeline_name|pipelineName|
-|**--pipeline**|object|Pipeline resource definition.|pipeline|pipeline|
 |**--if-match**|string|ETag of the pipeline entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match|If-Match|
+|**--pipeline**|object|Pipeline resource definition.|pipeline|pipeline|
 
 #### Command `az datafactory pipeline update`
 
@@ -968,16 +1116,12 @@ perties\\":{\\"recurrence\\":{\\"endTime\\":\\"2018-06-16T00:55:13.8441801Z\\",\
 |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName|
 |**--factory-name**|string|The factory name.|factory_name|factoryName|
 |**--trigger-name**|string|The trigger name.|trigger_name|triggerName|
-|**--properties**|object|Properties of the trigger.|properties|properties|
 |**--if-match**|string|ETag of the trigger entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match|If-Match|
+|**--properties**|object|Properties of the trigger.|properties|properties|
 
 #### Command `az datafactory trigger update`
-##### Example
-```
-az datafactory trigger update --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" \
---description "Example description" --name "exampleTrigger"
-```
+
 ##### Parameters
 |Option|Type|Description|Path (SDK)|Swagger name|
 |------|----|-----------|----------|------------|