azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py (18 changes: 4 additions & 14 deletions)
@@ -26,9 +26,9 @@
 from .integration_runtime_status_list_response import IntegrationRuntimeStatusListResponse
 from .update_integration_runtime_request import UpdateIntegrationRuntimeRequest
 from .update_integration_runtime_node_request import UpdateIntegrationRuntimeNodeRequest
-from .parameter_specification import ParameterSpecification
 from .linked_service import LinkedService
 from .linked_service_resource import LinkedServiceResource
+from .parameter_specification import ParameterSpecification
 from .dataset import Dataset
 from .dataset_resource import DatasetResource
 from .activity_dependency import ActivityDependency
@@ -59,8 +59,6 @@
 from .operation_list_response import OperationListResponse
 from .azure_data_lake_analytics_linked_service import AzureDataLakeAnalyticsLinkedService
 from .hd_insight_on_demand_linked_service import HDInsightOnDemandLinkedService
-from .netezza_linked_service import NetezzaLinkedService

Contributor: Why is this removed?

Member: Because @haricr's fork is not up to date with the latest master, and we build from the branch, not from the state the PR would have after merge.

See the branch comparison, 18 commits behind: [screenshot]

Member: That's an interesting scenario indeed for the automation system.

Contributor: Either way, we need to block the merge of this PR until we release 0.5.0. So please do not merge this.

Member: @hvermis, two things:

  • I'm working right now to improve the system and make its git handling smarter.
  • The base of this PR is restapi_auto_datafactory/resource-manager, not master; I added a layer of branches, so this PR will be merged automatically when the Swagger is merged. The one you don't want to merge too early is the cumulative PR that contains all your work in progress:
    [AutoPR] restapi_auto_datafactory/resource-manager #1972

Am I clear?

Contributor: I have approved #1972 and we want that one merged, but not the latest changes in this one, which contain only Hari's updates: FilterActivity, parameters in LinkedService, etc.

Member: If you don't merge a Swagger that you don't want to release, we're good :)
That's the process anyway, since merging a Swagger means it's ready to release for the other languages.

-from .vertica_linked_service import VerticaLinkedService
 from .zoho_linked_service import ZohoLinkedService
 from .xero_linked_service import XeroLinkedService
 from .square_linked_service import SquareLinkedService
@@ -128,8 +126,6 @@
 from .sql_server_linked_service import SqlServerLinkedService
 from .azure_sql_dw_linked_service import AzureSqlDWLinkedService
 from .azure_storage_linked_service import AzureStorageLinkedService
-from .vertica_table_dataset import VerticaTableDataset
-from .netezza_table_dataset import NetezzaTableDataset
 from .zoho_object_dataset import ZohoObjectDataset
 from .xero_object_dataset import XeroObjectDataset
 from .square_object_dataset import SquareObjectDataset
@@ -208,8 +204,6 @@
 from .web_activity import WebActivity
 from .redshift_unload_settings import RedshiftUnloadSettings
 from .amazon_redshift_source import AmazonRedshiftSource
-from .vertica_source import VerticaSource
-from .netezza_source import NetezzaSource
 from .zoho_source import ZohoSource
 from .xero_source import XeroSource
 from .square_source import SquareSource
@@ -291,6 +285,7 @@
 from .copy_sink import CopySink
 from .copy_activity import CopyActivity
 from .execution_activity import ExecutionActivity
+from .filter_activity import FilterActivity
 from .until_activity import UntilActivity
 from .wait_activity import WaitActivity
 from .for_each_activity import ForEachActivity
@@ -407,9 +402,9 @@
     'IntegrationRuntimeStatusListResponse',
     'UpdateIntegrationRuntimeRequest',
     'UpdateIntegrationRuntimeNodeRequest',
-    'ParameterSpecification',
     'LinkedService',
     'LinkedServiceResource',
+    'ParameterSpecification',
     'Dataset',
     'DatasetResource',
     'ActivityDependency',
@@ -440,8 +435,6 @@
     'OperationListResponse',
     'AzureDataLakeAnalyticsLinkedService',
     'HDInsightOnDemandLinkedService',
-    'NetezzaLinkedService',
-    'VerticaLinkedService',
     'ZohoLinkedService',
     'XeroLinkedService',
     'SquareLinkedService',
@@ -509,8 +502,6 @@
     'SqlServerLinkedService',
     'AzureSqlDWLinkedService',
     'AzureStorageLinkedService',
-    'VerticaTableDataset',
-    'NetezzaTableDataset',
     'ZohoObjectDataset',
     'XeroObjectDataset',
     'SquareObjectDataset',
@@ -589,8 +580,6 @@
     'WebActivity',
     'RedshiftUnloadSettings',
     'AmazonRedshiftSource',
-    'VerticaSource',
-    'NetezzaSource',
     'ZohoSource',
     'XeroSource',
     'SquareSource',
@@ -672,6 +661,7 @@
     'CopySink',
     'CopyActivity',
     'ExecutionActivity',
+    'FilterActivity',
     'UntilActivity',
     'WaitActivity',
     'ForEachActivity',
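
For a sense of what the newly exported FilterActivity looks like in use, here is a minimal sketch. Only the import is shown in this diff; the constructor signature below (name, items, condition, with Data Factory expression strings wrapped in Expression) is assumed from the generated model conventions:

from azure.mgmt.datafactory.models import Expression, FilterActivity

# Keep only the non-empty items from a pipeline parameter (assumed signature).
filter_files = FilterActivity(
    name='FilterEmptyFiles',
    items=Expression(value='@pipeline().parameters.inputFiles'),
    condition=Expression(value='@greater(item().size, 0)'),
)
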
@@ -28,6 +28,9 @@ class ActivityPolicy(Model):
     :param retry_interval_in_seconds: Interval between each retry attempt (in
      seconds). The default is 30 sec.
     :type retry_interval_in_seconds: int
+    :param secure_output: When set to true, Output from activity is considered
+     as secure and will not be logged to monitoring.
+    :type secure_output: bool
     """

     _validation = {
@@ -39,11 +42,13 @@
         'timeout': {'key': 'timeout', 'type': 'object'},
         'retry': {'key': 'retry', 'type': 'object'},
         'retry_interval_in_seconds': {'key': 'retryIntervalInSeconds', 'type': 'int'},
+        'secure_output': {'key': 'secureOutput', 'type': 'bool'},
     }

-    def __init__(self, additional_properties=None, timeout=None, retry=None, retry_interval_in_seconds=None):
+    def __init__(self, additional_properties=None, timeout=None, retry=None, retry_interval_in_seconds=None, secure_output=None):
         super(ActivityPolicy, self).__init__()
         self.additional_properties = additional_properties
         self.timeout = timeout
         self.retry = retry
         self.retry_interval_in_seconds = retry_interval_in_seconds
+        self.secure_output = secure_output
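
As a quick illustration of the new flag, a minimal sketch using only the keyword arguments visible in the signature above (the timeout and retry values are arbitrary placeholders):

from azure.mgmt.datafactory.models import ActivityPolicy

policy = ActivityPolicy(
    timeout='7.00:00:00',          # Timespan-style value, serialized under 'timeout'
    retry=3,                       # maximum retry attempts
    retry_interval_in_seconds=30,  # the documented default
    secure_output=True,            # new: keep activity output out of monitoring logs
)
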
@@ -23,6 +23,12 @@ class AmazonMWSLinkedService(LinkedService):
      ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
     :param description: Linked service description.
     :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     Dataset.
+    :type annotations: list[object]
     :param type: Constant filled by server.
     :type type: str
     :param endpoint: The endpoint of the Amazon MWS server, (i.e.
@@ -68,6 +74,8 @@
         'additional_properties': {'key': '', 'type': '{object}'},
         'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
         'description': {'key': 'description', 'type': 'str'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
         'type': {'key': 'type', 'type': 'str'},
         'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'},
         'marketplace_id': {'key': 'typeProperties.marketplaceID', 'type': 'object'},
@@ -81,8 +89,8 @@
         'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
     }

-    def __init__(self, endpoint, marketplace_id, seller_id, access_key_id, additional_properties=None, connect_via=None, description=None, mws_auth_token=None, secret_key=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None):
-        super(AmazonMWSLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description)
+    def __init__(self, endpoint, marketplace_id, seller_id, access_key_id, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, mws_auth_token=None, secret_key=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None):
+        super(AmazonMWSLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations)
         self.endpoint = endpoint
         self.marketplace_id = marketplace_id
         self.seller_id = seller_id
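
To show what the new parameters/annotations arguments enable on a linked service, a minimal sketch (the endpoint and IDs are placeholders, and ParameterSpecification(type=...) is assumed from its definition elsewhere in this package):

from azure.mgmt.datafactory.models import (
    AmazonMWSLinkedService,
    ParameterSpecification,
)

mws = AmazonMWSLinkedService(
    endpoint='mws.amazonservices.com',   # placeholder endpoint
    marketplace_id='A2EUQ1WTGCTBG2',     # placeholder marketplace ID
    seller_id='my-seller-id',            # placeholder
    access_key_id='my-access-key-id',    # placeholder
    # New in this change: linked services can declare parameters...
    parameters={'SellerRegion': ParameterSpecification(type='String')},
    # ...and carry free-form annotation tags.
    annotations=['mws', 'sandbox'],
)
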
@@ -29,6 +29,9 @@ class AmazonMWSObjectDataset(Dataset):
     :param parameters: Parameters for dataset.
     :type parameters: dict[str,
      ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     Dataset.
+    :type annotations: list[object]
     :param type: Constant filled by server.
     :type type: str
     """
@@ -38,6 +41,6 @@
         'type': {'required': True},
     }

-    def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, parameters=None):
-        super(AmazonMWSObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, linked_service_name=linked_service_name, parameters=parameters)
+    def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, parameters=None, annotations=None):
+        super(AmazonMWSObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations)
         self.type = 'AmazonMWSObject'
@@ -23,6 +23,12 @@ class AmazonRedshiftLinkedService(LinkedService):
      ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
     :param description: Linked service description.
     :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     Dataset.
+    :type annotations: list[object]
     :param type: Constant filled by server.
     :type type: str
     :param server: The name of the Amazon Redshift server. Type: string (or
@@ -56,6 +62,8 @@
         'additional_properties': {'key': '', 'type': '{object}'},
         'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
         'description': {'key': 'description', 'type': 'str'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
         'type': {'key': 'type', 'type': 'str'},
         'server': {'key': 'typeProperties.server', 'type': 'object'},
         'username': {'key': 'typeProperties.username', 'type': 'object'},
@@ -65,8 +73,8 @@
         'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
     }

-    def __init__(self, server, database, additional_properties=None, connect_via=None, description=None, username=None, password=None, port=None, encrypted_credential=None):
-        super(AmazonRedshiftLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description)
+    def __init__(self, server, database, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, username=None, password=None, port=None, encrypted_credential=None):
+        super(AmazonRedshiftLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations)
         self.server = server
         self.username = username
         self.password = password
@@ -29,6 +29,9 @@ class AmazonS3Dataset(Dataset):
     :param parameters: Parameters for dataset.
     :type parameters: dict[str,
      ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     Dataset.
+    :type annotations: list[object]
     :param type: Constant filled by server.
     :type type: str
     :param bucket_name: The name of the Amazon S3 bucket. Type: string (or
@@ -62,6 +65,7 @@
         'structure': {'key': 'structure', 'type': 'object'},
         'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
         'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
         'type': {'key': 'type', 'type': 'str'},
         'bucket_name': {'key': 'typeProperties.bucketName', 'type': 'object'},
         'key': {'key': 'typeProperties.key', 'type': 'object'},
@@ -71,8 +75,8 @@
         'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'},
     }

-    def __init__(self, linked_service_name, bucket_name, additional_properties=None, description=None, structure=None, parameters=None, key=None, prefix=None, version=None, format=None, compression=None):
-        super(AmazonS3Dataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, linked_service_name=linked_service_name, parameters=parameters)
+    def __init__(self, linked_service_name, bucket_name, additional_properties=None, description=None, structure=None, parameters=None, annotations=None, key=None, prefix=None, version=None, format=None, compression=None):
+        super(AmazonS3Dataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations)
         self.bucket_name = bucket_name
         self.key = key
         self.prefix = prefix
@@ -23,6 +23,12 @@ class AmazonS3LinkedService(LinkedService):
      ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
     :param description: Linked service description.
     :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     Dataset.
+    :type annotations: list[object]
     :param type: Constant filled by server.
     :type type: str
     :param access_key_id: The access key identifier of the Amazon S3 Identity
@@ -46,14 +52,16 @@
         'additional_properties': {'key': '', 'type': '{object}'},
         'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
         'description': {'key': 'description', 'type': 'str'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
         'type': {'key': 'type', 'type': 'str'},
         'access_key_id': {'key': 'typeProperties.accessKeyId', 'type': 'object'},
         'secret_access_key': {'key': 'typeProperties.secretAccessKey', 'type': 'SecretBase'},
         'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
     }

-    def __init__(self, additional_properties=None, connect_via=None, description=None, access_key_id=None, secret_access_key=None, encrypted_credential=None):
-        super(AmazonS3LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description)
+    def __init__(self, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, access_key_id=None, secret_access_key=None, encrypted_credential=None):
+        super(AmazonS3LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations)
         self.access_key_id = access_key_id
         self.secret_access_key = secret_access_key
         self.encrypted_credential = encrypted_credential
@@ -23,6 +23,12 @@ class AzureBatchLinkedService(LinkedService):
      ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
     :param description: Linked service description.
     :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     Dataset.
+    :type annotations: list[object]
     :param type: Constant filled by server.
     :type type: str
     :param account_name: The Azure Batch account name. Type: string (or
@@ -57,6 +63,8 @@
         'additional_properties': {'key': '', 'type': '{object}'},
         'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
         'description': {'key': 'description', 'type': 'str'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
         'type': {'key': 'type', 'type': 'str'},
         'account_name': {'key': 'typeProperties.accountName', 'type': 'object'},
         'access_key': {'key': 'typeProperties.accessKey', 'type': 'SecretBase'},
@@ -66,8 +74,8 @@
         'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
     }

-    def __init__(self, account_name, batch_uri, pool_name, linked_service_name, additional_properties=None, connect_via=None, description=None, access_key=None, encrypted_credential=None):
-        super(AzureBatchLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description)
+    def __init__(self, account_name, batch_uri, pool_name, linked_service_name, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, access_key=None, encrypted_credential=None):
+        super(AzureBatchLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations)
         self.account_name = account_name
         self.access_key = access_key
         self.batch_uri = batch_uri
@@ -29,6 +29,9 @@ class AzureBlobDataset(Dataset):
     :param parameters: Parameters for dataset.
     :type parameters: dict[str,
      ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     Dataset.
+    :type annotations: list[object]
     :param type: Constant filled by server.
     :type type: str
     :param folder_path: The path of the Azure Blob storage. Type: string (or
@@ -57,6 +60,7 @@
         'structure': {'key': 'structure', 'type': 'object'},
         'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
         'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
         'type': {'key': 'type', 'type': 'str'},
         'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'},
         'table_root_location': {'key': 'typeProperties.tableRootLocation', 'type': 'object'},
@@ -65,8 +69,8 @@
         'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'},
     }

-    def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, parameters=None, folder_path=None, table_root_location=None, file_name=None, format=None, compression=None):
-        super(AzureBlobDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, linked_service_name=linked_service_name, parameters=parameters)
+    def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, parameters=None, annotations=None, folder_path=None, table_root_location=None, file_name=None, format=None, compression=None):
+        super(AzureBlobDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations)
         self.folder_path = folder_path
         self.table_root_location = table_root_location
         self.file_name = file_name
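
And on the dataset side, a minimal sketch of the new annotations argument (LinkedServiceReference(reference_name=...) is assumed from its definition elsewhere in this package; names and paths are placeholders):

from azure.mgmt.datafactory.models import AzureBlobDataset, LinkedServiceReference

blob_ds = AzureBlobDataset(
    linked_service_name=LinkedServiceReference(reference_name='MyStorageLinkedService'),
    folder_path='container/raw',   # placeholder blob path
    file_name='events.csv',        # placeholder file name
    # New in this change: free-form tags describing the dataset.
    annotations=['raw', 'daily-ingest'],
)
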