diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_metadata.json b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_metadata.json deleted file mode 100644 index dd5e00735108..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_metadata.json +++ /dev/null @@ -1,122 +0,0 @@ -{ - "chosen_version": "2018-06-01", - "total_api_version_list": ["2018-06-01"], - "client": { - "name": "DataFactoryManagementClient", - "filename": "_data_factory_management_client", - "description": "The Azure Data Factory V2 management API provides a RESTful set of web services that interact with Azure Data Factory V2 services.", - "base_url": "\u0027https://management.azure.com\u0027", - "custom_base_url": null, - "azure_arm": true, - "has_lro_operations": true, - "client_side_validation": false, - "sync_imports": "{\"typing\": {\"azurecore\": {\"azure.core.credentials\": [\"TokenCredential\"]}}, \"regular\": {\"azurecore\": {\"azure.profiles\": [\"KnownProfiles\", \"ProfileDefinition\"], \"azure.profiles.multiapiclient\": [\"MultiApiClientMixin\"], \"msrest\": [\"Deserializer\", \"Serializer\"], \"azure.mgmt.core\": [\"ARMPipelineClient\"]}, \"local\": {\"._configuration\": [\"DataFactoryManagementClientConfiguration\"]}}, \"conditional\": {\"stdlib\": {\"typing\": [\"Any\", \"Optional\"]}, \"azurecore\": {\"azure.core.pipeline.transport\": [\"HttpRequest\", \"HttpResponse\"]}}}", - "async_imports": "{\"typing\": {\"azurecore\": {\"azure.core.credentials_async\": [\"AsyncTokenCredential\"]}}, \"regular\": {\"azurecore\": {\"azure.profiles\": [\"KnownProfiles\", \"ProfileDefinition\"], \"azure.profiles.multiapiclient\": [\"MultiApiClientMixin\"], \"msrest\": [\"Deserializer\", \"Serializer\"], \"azure.mgmt.core\": [\"AsyncARMPipelineClient\"]}, \"local\": {\"._configuration\": [\"DataFactoryManagementClientConfiguration\"]}}, \"conditional\": {\"stdlib\": {\"typing\": [\"Any\", \"Optional\"]}, \"azurecore\": {\"azure.core.pipeline.transport\": [\"AsyncHttpResponse\", \"HttpRequest\"]}}}" - }, - "global_parameters": { - "sync": { - "credential": { - "signature": "credential, # type: \"TokenCredential\"", - "description": "Credential needed for the client to connect to Azure.", - "docstring_type": "~azure.core.credentials.TokenCredential", - "required": true - }, - "subscription_id": { - "signature": "subscription_id, # type: str", - "description": "The subscription identifier.", - "docstring_type": "str", - "required": true - } - }, - "async": { - "credential": { - "signature": "credential: \"AsyncTokenCredential\",", - "description": "Credential needed for the client to connect to Azure.", - "docstring_type": "~azure.core.credentials_async.AsyncTokenCredential", - "required": true - }, - "subscription_id": { - "signature": "subscription_id: str,", - "description": "The subscription identifier.", - "docstring_type": "str", - "required": true - } - }, - "constant": { - }, - "call": "credential, subscription_id", - "service_client_specific": { - "sync": { - "api_version": { - "signature": "api_version=None, # type: Optional[str]", - "description": "API version to use if no profile is provided, or if missing in profile.", - "docstring_type": "str", - "required": false - }, - "base_url": { - "signature": "base_url=None, # type: Optional[str]", - "description": "Service URL", - "docstring_type": "str", - "required": false - }, - "profile": { - "signature": "profile=KnownProfiles.default, # type: KnownProfiles", - "description": "A profile definition, from 
KnownProfiles to dict.", - "docstring_type": "azure.profiles.KnownProfiles", - "required": false - } - }, - "async": { - "api_version": { - "signature": "api_version: Optional[str] = None,", - "description": "API version to use if no profile is provided, or if missing in profile.", - "docstring_type": "str", - "required": false - }, - "base_url": { - "signature": "base_url: Optional[str] = None,", - "description": "Service URL", - "docstring_type": "str", - "required": false - }, - "profile": { - "signature": "profile: KnownProfiles = KnownProfiles.default,", - "description": "A profile definition, from KnownProfiles to dict.", - "docstring_type": "azure.profiles.KnownProfiles", - "required": false - } - } - } - }, - "config": { - "credential": true, - "credential_scopes": ["https://management.azure.com/.default"], - "credential_default_policy_type": "BearerTokenCredentialPolicy", - "credential_default_policy_type_has_async_version": true, - "credential_key_header_name": null, - "sync_imports": "{\"regular\": {\"azurecore\": {\"azure.core.configuration\": [\"Configuration\"], \"azure.core.pipeline\": [\"policies\"], \"azure.mgmt.core.policies\": [\"ARMHttpLoggingPolicy\"]}, \"local\": {\"._version\": [\"VERSION\"]}}, \"conditional\": {\"stdlib\": {\"typing\": [\"Any\"]}}, \"typing\": {\"azurecore\": {\"azure.core.credentials\": [\"TokenCredential\"]}}}", - "async_imports": "{\"regular\": {\"azurecore\": {\"azure.core.configuration\": [\"Configuration\"], \"azure.core.pipeline\": [\"policies\"], \"azure.mgmt.core.policies\": [\"ARMHttpLoggingPolicy\"]}, \"local\": {\".._version\": [\"VERSION\"]}}, \"conditional\": {\"stdlib\": {\"typing\": [\"Any\"]}}, \"typing\": {\"azurecore\": {\"azure.core.credentials_async\": [\"AsyncTokenCredential\"]}}}" - }, - "operation_groups": { - "operations": "Operations", - "factories": "FactoriesOperations", - "exposure_control": "ExposureControlOperations", - "integration_runtimes": "IntegrationRuntimesOperations", - "integration_runtime_object_metadata": "IntegrationRuntimeObjectMetadataOperations", - "integration_runtime_nodes": "IntegrationRuntimeNodesOperations", - "linked_services": "LinkedServicesOperations", - "datasets": "DatasetsOperations", - "pipelines": "PipelinesOperations", - "pipeline_runs": "PipelineRunsOperations", - "activity_runs": "ActivityRunsOperations", - "triggers": "TriggersOperations", - "trigger_runs": "TriggerRunsOperations", - "data_flows": "DataFlowsOperations", - "data_flow_debug_session": "DataFlowDebugSessionOperations", - "managed_virtual_networks": "ManagedVirtualNetworksOperations", - "managed_private_endpoints": "ManagedPrivateEndpointsOperations", - "private_end_point_connections": "PrivateEndPointConnectionsOperations", - "private_endpoint_connection": "PrivateEndpointConnectionOperations", - "private_link_resources": "PrivateLinkResourcesOperations" - } -} \ No newline at end of file diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_version.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_version.py index 48944bf3938a..c47f66669f1b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_version.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_version.py @@ -6,4 +6,4 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -VERSION = "2.0.0" +VERSION = "1.0.0" diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py index d05e600bf0b6..68095cd9316e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py @@ -279,6 +279,7 @@ from ._models_py3 import FileSystemSink from ._models_py3 import FileSystemSource from ._models_py3 import FilterActivity + from ._models_py3 import Flowlet from ._models_py3 import ForEachActivity from ._models_py3 import FormatReadSettings from ._models_py3 import FormatWriteSettings @@ -974,6 +975,7 @@ from ._models import FileSystemSink # type: ignore from ._models import FileSystemSource # type: ignore from ._models import FilterActivity # type: ignore + from ._models import Flowlet # type: ignore from ._models import ForEachActivity # type: ignore from ._models import FormatReadSettings # type: ignore from ._models import FormatWriteSettings # type: ignore @@ -1402,7 +1404,7 @@ AvroCompressionCodec, AzureFunctionActivityMethod, AzureSearchIndexWriteBehaviorType, - BlobEventTypes, + BlobEventType, CassandraSourceReadConsistencyLevels, CompressionCodec, CopyBehaviorType, @@ -1768,6 +1770,7 @@ 'FileSystemSink', 'FileSystemSource', 'FilterActivity', + 'Flowlet', 'ForEachActivity', 'FormatReadSettings', 'FormatWriteSettings', @@ -2194,7 +2197,7 @@ 'AvroCompressionCodec', 'AzureFunctionActivityMethod', 'AzureSearchIndexWriteBehaviorType', - 'BlobEventTypes', + 'BlobEventType', 'CassandraSourceReadConsistencyLevels', 'CompressionCodec', 'CopyBehaviorType', diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_data_factory_management_client_enums.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_data_factory_management_client_enums.py index 158bf6b35e73..eca4ab8df827 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_data_factory_management_client_enums.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_data_factory_management_client_enums.py @@ -59,7 +59,7 @@ class AzureSearchIndexWriteBehaviorType(with_metaclass(_CaseInsensitiveEnumMeta, MERGE = "Merge" UPLOAD = "Upload" -class BlobEventTypes(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class BlobEventType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): MICROSOFT_STORAGE_BLOB_CREATED = "Microsoft.Storage.BlobCreated" MICROSOFT_STORAGE_BLOB_DELETED = "Microsoft.Storage.BlobDeleted" diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py index 5f4f0c460837..297f68b4a96b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py @@ -8198,7 +8198,7 @@ class BlobEventsTrigger(MultiplePipelineTrigger): :param ignore_empty_blobs: If set to true, blobs with zero bytes will be ignored. :type ignore_empty_blobs: bool :param events: Required. The type of events that cause this trigger to fire. - :type events: list[str or ~azure.mgmt.datafactory.models.BlobEventTypes] + :type events: list[str or ~azure.mgmt.datafactory.models.BlobEventType] :param scope: Required. 
The ARM resource ID of the Storage Account. :type scope: str """ @@ -11051,7 +11051,7 @@ class DataFlow(msrest.serialization.Model): """Azure Data Factory nested object which contains a flow with data movements and transformations. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: MappingDataFlow, WranglingDataFlow. + sub-classes are: Flowlet, MappingDataFlow, WranglingDataFlow. All required parameters must be populated in order to send to Azure. @@ -11078,7 +11078,7 @@ class DataFlow(msrest.serialization.Model): } _subtype_map = { - 'type': {'MappingDataFlow': 'MappingDataFlow', 'WranglingDataFlow': 'WranglingDataFlow'} + 'type': {'Flowlet': 'Flowlet', 'MappingDataFlow': 'MappingDataFlow', 'WranglingDataFlow': 'WranglingDataFlow'} } def __init__( @@ -11190,6 +11190,8 @@ class DataFlowDebugPackage(msrest.serialization.Model): :type session_id: str :param data_flow: Data flow instance. :type data_flow: ~azure.mgmt.datafactory.models.DataFlowDebugResource + :param data_flows: List of Data flows. + :type data_flows: list[~azure.mgmt.datafactory.models.DataFlowDebugResource] :param datasets: List of datasets. :type datasets: list[~azure.mgmt.datafactory.models.DatasetDebugResource] :param linked_services: List of linked services. @@ -11204,6 +11206,7 @@ class DataFlowDebugPackage(msrest.serialization.Model): 'additional_properties': {'key': '', 'type': '{object}'}, 'session_id': {'key': 'sessionId', 'type': 'str'}, 'data_flow': {'key': 'dataFlow', 'type': 'DataFlowDebugResource'}, + 'data_flows': {'key': 'dataFlows', 'type': '[DataFlowDebugResource]'}, 'datasets': {'key': 'datasets', 'type': '[DatasetDebugResource]'}, 'linked_services': {'key': 'linkedServices', 'type': '[LinkedServiceDebugResource]'}, 'staging': {'key': 'staging', 'type': 'DataFlowStagingInfo'}, @@ -11218,6 +11221,7 @@ def __init__( self.additional_properties = kwargs.get('additional_properties', None) self.session_id = kwargs.get('session_id', None) self.data_flow = kwargs.get('data_flow', None) + self.data_flows = kwargs.get('data_flows', None) self.datasets = kwargs.get('datasets', None) self.linked_services = kwargs.get('linked_services', None) self.staging = kwargs.get('staging', None) @@ -11496,6 +11500,8 @@ class Transformation(msrest.serialization.Model): :type name: str :param description: Transformation description. :type description: str + :param flowlet: Flowlet Reference. + :type flowlet: ~azure.mgmt.datafactory.models.DataFlowReference """ _validation = { @@ -11505,6 +11511,7 @@ class Transformation(msrest.serialization.Model): _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, + 'flowlet': {'key': 'flowlet', 'type': 'DataFlowReference'}, } def __init__( @@ -11514,6 +11521,7 @@ def __init__( super(Transformation, self).__init__(**kwargs) self.name = kwargs['name'] self.description = kwargs.get('description', None) + self.flowlet = kwargs.get('flowlet', None) class DataFlowSink(Transformation): @@ -11525,6 +11533,8 @@ class DataFlowSink(Transformation): :type name: str :param description: Transformation description. :type description: str + :param flowlet: Flowlet Reference. + :type flowlet: ~azure.mgmt.datafactory.models.DataFlowReference :param dataset: Dataset reference. :type dataset: ~azure.mgmt.datafactory.models.DatasetReference :param linked_service: Linked service reference. 
@@ -11540,6 +11550,7 @@ class DataFlowSink(Transformation): _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, + 'flowlet': {'key': 'flowlet', 'type': 'DataFlowReference'}, 'dataset': {'key': 'dataset', 'type': 'DatasetReference'}, 'linked_service': {'key': 'linkedService', 'type': 'LinkedServiceReference'}, 'schema_linked_service': {'key': 'schemaLinkedService', 'type': 'LinkedServiceReference'}, @@ -11564,6 +11575,8 @@ class DataFlowSource(Transformation): :type name: str :param description: Transformation description. :type description: str + :param flowlet: Flowlet Reference. + :type flowlet: ~azure.mgmt.datafactory.models.DataFlowReference :param dataset: Dataset reference. :type dataset: ~azure.mgmt.datafactory.models.DatasetReference :param linked_service: Linked service reference. @@ -11579,6 +11592,7 @@ class DataFlowSource(Transformation): _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, + 'flowlet': {'key': 'flowlet', 'type': 'DataFlowReference'}, 'dataset': {'key': 'dataset', 'type': 'DatasetReference'}, 'linked_service': {'key': 'linkedService', 'type': 'LinkedServiceReference'}, 'schema_linked_service': {'key': 'schemaLinkedService', 'type': 'LinkedServiceReference'}, @@ -16009,6 +16023,70 @@ def __init__( self.condition = kwargs['condition'] +class Flowlet(DataFlow): + """Data flow flowlet. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Type of data flow.Constant filled by server. + :type type: str + :param description: The description of the data flow. + :type description: str + :param annotations: List of tags that can be used for describing the data flow. + :type annotations: list[any] + :param folder: The folder that this data flow is in. If not specified, Data flow will appear at + the root level. + :type folder: ~azure.mgmt.datafactory.models.DataFlowFolder + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, any] + :param sources: List of sources in Flowlet. + :type sources: list[~azure.mgmt.datafactory.models.DataFlowSource] + :param sinks: List of sinks in Flowlet. + :type sinks: list[~azure.mgmt.datafactory.models.DataFlowSink] + :param transformations: List of transformations in Flowlet. + :type transformations: list[~azure.mgmt.datafactory.models.Transformation] + :param script: Flowlet script. + :type script: str + :param script_lines: Flowlet script lines. + :type script_lines: list[str] + :param additional_properties1: Any object. 
+ :type additional_properties1: any + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DataFlowFolder'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'sources': {'key': 'typeProperties.sources', 'type': '[DataFlowSource]'}, + 'sinks': {'key': 'typeProperties.sinks', 'type': '[DataFlowSink]'}, + 'transformations': {'key': 'typeProperties.transformations', 'type': '[Transformation]'}, + 'script': {'key': 'typeProperties.script', 'type': 'str'}, + 'script_lines': {'key': 'typeProperties.scriptLines', 'type': '[str]'}, + 'additional_properties1': {'key': 'typeProperties.additionalProperties', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(Flowlet, self).__init__(**kwargs) + self.type = 'Flowlet' # type: str + self.additional_properties = kwargs.get('additional_properties', None) + self.sources = kwargs.get('sources', None) + self.sinks = kwargs.get('sinks', None) + self.transformations = kwargs.get('transformations', None) + self.script = kwargs.get('script', None) + self.script_lines = kwargs.get('script_lines', None) + self.additional_properties1 = kwargs.get('additional_properties1', None) + + class ForEachActivity(ControlActivity): """This activity is used for iterating over a collection and execute given activities. @@ -22502,6 +22580,8 @@ class MappingDataFlow(DataFlow): :type transformations: list[~azure.mgmt.datafactory.models.Transformation] :param script: DataFlow script. :type script: str + :param script_lines: Data flow script lines. + :type script_lines: list[str] """ _validation = { @@ -22517,6 +22597,7 @@ class MappingDataFlow(DataFlow): 'sinks': {'key': 'typeProperties.sinks', 'type': '[DataFlowSink]'}, 'transformations': {'key': 'typeProperties.transformations', 'type': '[Transformation]'}, 'script': {'key': 'typeProperties.script', 'type': 'str'}, + 'script_lines': {'key': 'typeProperties.scriptLines', 'type': '[str]'}, } def __init__( @@ -22529,6 +22610,7 @@ def __init__( self.sinks = kwargs.get('sinks', None) self.transformations = kwargs.get('transformations', None) self.script = kwargs.get('script', None) + self.script_lines = kwargs.get('script_lines', None) class MariaDBLinkedService(LinkedService): @@ -27506,6 +27588,8 @@ class PowerQuerySink(DataFlowSink): :type name: str :param description: Transformation description. :type description: str + :param flowlet: Flowlet Reference. + :type flowlet: ~azure.mgmt.datafactory.models.DataFlowReference :param dataset: Dataset reference. :type dataset: ~azure.mgmt.datafactory.models.DatasetReference :param linked_service: Linked service reference. @@ -27523,6 +27607,7 @@ class PowerQuerySink(DataFlowSink): _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, + 'flowlet': {'key': 'flowlet', 'type': 'DataFlowReference'}, 'dataset': {'key': 'dataset', 'type': 'DatasetReference'}, 'linked_service': {'key': 'linkedService', 'type': 'LinkedServiceReference'}, 'schema_linked_service': {'key': 'schemaLinkedService', 'type': 'LinkedServiceReference'}, @@ -27569,6 +27654,8 @@ class PowerQuerySource(DataFlowSource): :type name: str :param description: Transformation description. :type description: str + :param flowlet: Flowlet Reference. 
+ :type flowlet: ~azure.mgmt.datafactory.models.DataFlowReference :param dataset: Dataset reference. :type dataset: ~azure.mgmt.datafactory.models.DatasetReference :param linked_service: Linked service reference. @@ -27586,6 +27673,7 @@ class PowerQuerySource(DataFlowSource): _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, + 'flowlet': {'key': 'flowlet', 'type': 'DataFlowReference'}, 'dataset': {'key': 'dataset', 'type': 'DatasetReference'}, 'linked_service': {'key': 'linkedService', 'type': 'LinkedServiceReference'}, 'schema_linked_service': {'key': 'schemaLinkedService', 'type': 'LinkedServiceReference'}, diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py index 963145597ea9..dc13c1f49ae7 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py @@ -9422,7 +9422,7 @@ class BlobEventsTrigger(MultiplePipelineTrigger): :param ignore_empty_blobs: If set to true, blobs with zero bytes will be ignored. :type ignore_empty_blobs: bool :param events: Required. The type of events that cause this trigger to fire. - :type events: list[str or ~azure.mgmt.datafactory.models.BlobEventTypes] + :type events: list[str or ~azure.mgmt.datafactory.models.BlobEventType] :param scope: Required. The ARM resource ID of the Storage Account. :type scope: str """ @@ -9451,7 +9451,7 @@ class BlobEventsTrigger(MultiplePipelineTrigger): def __init__( self, *, - events: List[Union[str, "BlobEventTypes"]], + events: List[Union[str, "BlobEventType"]], scope: str, additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, @@ -12696,7 +12696,7 @@ class DataFlow(msrest.serialization.Model): """Azure Data Factory nested object which contains a flow with data movements and transformations. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: MappingDataFlow, WranglingDataFlow. + sub-classes are: Flowlet, MappingDataFlow, WranglingDataFlow. All required parameters must be populated in order to send to Azure. @@ -12723,7 +12723,7 @@ class DataFlow(msrest.serialization.Model): } _subtype_map = { - 'type': {'MappingDataFlow': 'MappingDataFlow', 'WranglingDataFlow': 'WranglingDataFlow'} + 'type': {'Flowlet': 'Flowlet', 'MappingDataFlow': 'MappingDataFlow', 'WranglingDataFlow': 'WranglingDataFlow'} } def __init__( @@ -12851,6 +12851,8 @@ class DataFlowDebugPackage(msrest.serialization.Model): :type session_id: str :param data_flow: Data flow instance. :type data_flow: ~azure.mgmt.datafactory.models.DataFlowDebugResource + :param data_flows: List of Data flows. + :type data_flows: list[~azure.mgmt.datafactory.models.DataFlowDebugResource] :param datasets: List of datasets. :type datasets: list[~azure.mgmt.datafactory.models.DatasetDebugResource] :param linked_services: List of linked services. 
@@ -12865,6 +12867,7 @@ class DataFlowDebugPackage(msrest.serialization.Model): 'additional_properties': {'key': '', 'type': '{object}'}, 'session_id': {'key': 'sessionId', 'type': 'str'}, 'data_flow': {'key': 'dataFlow', 'type': 'DataFlowDebugResource'}, + 'data_flows': {'key': 'dataFlows', 'type': '[DataFlowDebugResource]'}, 'datasets': {'key': 'datasets', 'type': '[DatasetDebugResource]'}, 'linked_services': {'key': 'linkedServices', 'type': '[LinkedServiceDebugResource]'}, 'staging': {'key': 'staging', 'type': 'DataFlowStagingInfo'}, @@ -12877,6 +12880,7 @@ def __init__( additional_properties: Optional[Dict[str, Any]] = None, session_id: Optional[str] = None, data_flow: Optional["DataFlowDebugResource"] = None, + data_flows: Optional[List["DataFlowDebugResource"]] = None, datasets: Optional[List["DatasetDebugResource"]] = None, linked_services: Optional[List["LinkedServiceDebugResource"]] = None, staging: Optional["DataFlowStagingInfo"] = None, @@ -12887,6 +12891,7 @@ def __init__( self.additional_properties = additional_properties self.session_id = session_id self.data_flow = data_flow + self.data_flows = data_flows self.datasets = datasets self.linked_services = linked_services self.staging = staging @@ -13196,6 +13201,8 @@ class Transformation(msrest.serialization.Model): :type name: str :param description: Transformation description. :type description: str + :param flowlet: Flowlet Reference. + :type flowlet: ~azure.mgmt.datafactory.models.DataFlowReference """ _validation = { @@ -13205,6 +13212,7 @@ class Transformation(msrest.serialization.Model): _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, + 'flowlet': {'key': 'flowlet', 'type': 'DataFlowReference'}, } def __init__( @@ -13212,11 +13220,13 @@ def __init__( *, name: str, description: Optional[str] = None, + flowlet: Optional["DataFlowReference"] = None, **kwargs ): super(Transformation, self).__init__(**kwargs) self.name = name self.description = description + self.flowlet = flowlet class DataFlowSink(Transformation): @@ -13228,6 +13238,8 @@ class DataFlowSink(Transformation): :type name: str :param description: Transformation description. :type description: str + :param flowlet: Flowlet Reference. + :type flowlet: ~azure.mgmt.datafactory.models.DataFlowReference :param dataset: Dataset reference. :type dataset: ~azure.mgmt.datafactory.models.DatasetReference :param linked_service: Linked service reference. 
@@ -13243,6 +13255,7 @@ class DataFlowSink(Transformation): _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, + 'flowlet': {'key': 'flowlet', 'type': 'DataFlowReference'}, 'dataset': {'key': 'dataset', 'type': 'DatasetReference'}, 'linked_service': {'key': 'linkedService', 'type': 'LinkedServiceReference'}, 'schema_linked_service': {'key': 'schemaLinkedService', 'type': 'LinkedServiceReference'}, @@ -13253,12 +13266,13 @@ def __init__( *, name: str, description: Optional[str] = None, + flowlet: Optional["DataFlowReference"] = None, dataset: Optional["DatasetReference"] = None, linked_service: Optional["LinkedServiceReference"] = None, schema_linked_service: Optional["LinkedServiceReference"] = None, **kwargs ): - super(DataFlowSink, self).__init__(name=name, description=description, **kwargs) + super(DataFlowSink, self).__init__(name=name, description=description, flowlet=flowlet, **kwargs) self.dataset = dataset self.linked_service = linked_service self.schema_linked_service = schema_linked_service @@ -13273,6 +13287,8 @@ class DataFlowSource(Transformation): :type name: str :param description: Transformation description. :type description: str + :param flowlet: Flowlet Reference. + :type flowlet: ~azure.mgmt.datafactory.models.DataFlowReference :param dataset: Dataset reference. :type dataset: ~azure.mgmt.datafactory.models.DatasetReference :param linked_service: Linked service reference. @@ -13288,6 +13304,7 @@ class DataFlowSource(Transformation): _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, + 'flowlet': {'key': 'flowlet', 'type': 'DataFlowReference'}, 'dataset': {'key': 'dataset', 'type': 'DatasetReference'}, 'linked_service': {'key': 'linkedService', 'type': 'LinkedServiceReference'}, 'schema_linked_service': {'key': 'schemaLinkedService', 'type': 'LinkedServiceReference'}, @@ -13298,12 +13315,13 @@ def __init__( *, name: str, description: Optional[str] = None, + flowlet: Optional["DataFlowReference"] = None, dataset: Optional["DatasetReference"] = None, linked_service: Optional["LinkedServiceReference"] = None, schema_linked_service: Optional["LinkedServiceReference"] = None, **kwargs ): - super(DataFlowSource, self).__init__(name=name, description=description, **kwargs) + super(DataFlowSource, self).__init__(name=name, description=description, flowlet=flowlet, **kwargs) self.dataset = dataset self.linked_service = linked_service self.schema_linked_service = schema_linked_service @@ -18359,6 +18377,81 @@ def __init__( self.condition = condition +class Flowlet(DataFlow): + """Data flow flowlet. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Type of data flow.Constant filled by server. + :type type: str + :param description: The description of the data flow. + :type description: str + :param annotations: List of tags that can be used for describing the data flow. + :type annotations: list[any] + :param folder: The folder that this data flow is in. If not specified, Data flow will appear at + the root level. + :type folder: ~azure.mgmt.datafactory.models.DataFlowFolder + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, any] + :param sources: List of sources in Flowlet. + :type sources: list[~azure.mgmt.datafactory.models.DataFlowSource] + :param sinks: List of sinks in Flowlet. 
+ :type sinks: list[~azure.mgmt.datafactory.models.DataFlowSink] + :param transformations: List of transformations in Flowlet. + :type transformations: list[~azure.mgmt.datafactory.models.Transformation] + :param script: Flowlet script. + :type script: str + :param script_lines: Flowlet script lines. + :type script_lines: list[str] + :param additional_properties1: Any object. + :type additional_properties1: any + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DataFlowFolder'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'sources': {'key': 'typeProperties.sources', 'type': '[DataFlowSource]'}, + 'sinks': {'key': 'typeProperties.sinks', 'type': '[DataFlowSink]'}, + 'transformations': {'key': 'typeProperties.transformations', 'type': '[Transformation]'}, + 'script': {'key': 'typeProperties.script', 'type': 'str'}, + 'script_lines': {'key': 'typeProperties.scriptLines', 'type': '[str]'}, + 'additional_properties1': {'key': 'typeProperties.additionalProperties', 'type': 'object'}, + } + + def __init__( + self, + *, + description: Optional[str] = None, + annotations: Optional[List[Any]] = None, + folder: Optional["DataFlowFolder"] = None, + additional_properties: Optional[Dict[str, Any]] = None, + sources: Optional[List["DataFlowSource"]] = None, + sinks: Optional[List["DataFlowSink"]] = None, + transformations: Optional[List["Transformation"]] = None, + script: Optional[str] = None, + script_lines: Optional[List[str]] = None, + additional_properties1: Optional[Any] = None, + **kwargs + ): + super(Flowlet, self).__init__(description=description, annotations=annotations, folder=folder, **kwargs) + self.type = 'Flowlet' # type: str + self.additional_properties = additional_properties + self.sources = sources + self.sinks = sinks + self.transformations = transformations + self.script = script + self.script_lines = script_lines + self.additional_properties1 = additional_properties1 + + class ForEachActivity(ControlActivity): """This activity is used for iterating over a collection and execute given activities. @@ -25763,6 +25856,8 @@ class MappingDataFlow(DataFlow): :type transformations: list[~azure.mgmt.datafactory.models.Transformation] :param script: DataFlow script. :type script: str + :param script_lines: Data flow script lines. 
+ :type script_lines: list[str] """ _validation = { @@ -25778,6 +25873,7 @@ class MappingDataFlow(DataFlow): 'sinks': {'key': 'typeProperties.sinks', 'type': '[DataFlowSink]'}, 'transformations': {'key': 'typeProperties.transformations', 'type': '[Transformation]'}, 'script': {'key': 'typeProperties.script', 'type': 'str'}, + 'script_lines': {'key': 'typeProperties.scriptLines', 'type': '[str]'}, } def __init__( @@ -25790,6 +25886,7 @@ def __init__( sinks: Optional[List["DataFlowSink"]] = None, transformations: Optional[List["Transformation"]] = None, script: Optional[str] = None, + script_lines: Optional[List[str]] = None, **kwargs ): super(MappingDataFlow, self).__init__(description=description, annotations=annotations, folder=folder, **kwargs) @@ -25798,6 +25895,7 @@ def __init__( self.sinks = sinks self.transformations = transformations self.script = script + self.script_lines = script_lines class MariaDBLinkedService(LinkedService): @@ -31531,6 +31629,8 @@ class PowerQuerySink(DataFlowSink): :type name: str :param description: Transformation description. :type description: str + :param flowlet: Flowlet Reference. + :type flowlet: ~azure.mgmt.datafactory.models.DataFlowReference :param dataset: Dataset reference. :type dataset: ~azure.mgmt.datafactory.models.DatasetReference :param linked_service: Linked service reference. @@ -31548,6 +31648,7 @@ class PowerQuerySink(DataFlowSink): _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, + 'flowlet': {'key': 'flowlet', 'type': 'DataFlowReference'}, 'dataset': {'key': 'dataset', 'type': 'DatasetReference'}, 'linked_service': {'key': 'linkedService', 'type': 'LinkedServiceReference'}, 'schema_linked_service': {'key': 'schemaLinkedService', 'type': 'LinkedServiceReference'}, @@ -31559,13 +31660,14 @@ def __init__( *, name: str, description: Optional[str] = None, + flowlet: Optional["DataFlowReference"] = None, dataset: Optional["DatasetReference"] = None, linked_service: Optional["LinkedServiceReference"] = None, schema_linked_service: Optional["LinkedServiceReference"] = None, script: Optional[str] = None, **kwargs ): - super(PowerQuerySink, self).__init__(name=name, description=description, dataset=dataset, linked_service=linked_service, schema_linked_service=schema_linked_service, **kwargs) + super(PowerQuerySink, self).__init__(name=name, description=description, flowlet=flowlet, dataset=dataset, linked_service=linked_service, schema_linked_service=schema_linked_service, **kwargs) self.script = script @@ -31604,6 +31706,8 @@ class PowerQuerySource(DataFlowSource): :type name: str :param description: Transformation description. :type description: str + :param flowlet: Flowlet Reference. + :type flowlet: ~azure.mgmt.datafactory.models.DataFlowReference :param dataset: Dataset reference. :type dataset: ~azure.mgmt.datafactory.models.DatasetReference :param linked_service: Linked service reference. 
@@ -31621,6 +31725,7 @@ class PowerQuerySource(DataFlowSource): _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, + 'flowlet': {'key': 'flowlet', 'type': 'DataFlowReference'}, 'dataset': {'key': 'dataset', 'type': 'DatasetReference'}, 'linked_service': {'key': 'linkedService', 'type': 'LinkedServiceReference'}, 'schema_linked_service': {'key': 'schemaLinkedService', 'type': 'LinkedServiceReference'}, @@ -31632,13 +31737,14 @@ def __init__( *, name: str, description: Optional[str] = None, + flowlet: Optional["DataFlowReference"] = None, dataset: Optional["DatasetReference"] = None, linked_service: Optional["LinkedServiceReference"] = None, schema_linked_service: Optional["LinkedServiceReference"] = None, script: Optional[str] = None, **kwargs ): - super(PowerQuerySource, self).__init__(name=name, description=description, dataset=dataset, linked_service=linked_service, schema_linked_service=schema_linked_service, **kwargs) + super(PowerQuerySource, self).__init__(name=name, description=description, flowlet=flowlet, dataset=dataset, linked_service=linked_service, schema_linked_service=schema_linked_service, **kwargs) self.script = script
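
Reviewer note, not part of the patch: the sketch below exercises the new surface this diff introduces, namely the Flowlet data-flow subtype with script_lines, the flowlet reference now accepted by Transformation and its subclasses, the data_flows list on DataFlowDebugPackage, and the BlobEventTypes to BlobEventType enum rename. It is a minimal sketch against the 1.0.0 version pinned above; the subscription, resource-group, factory, and session identifiers are hypothetical placeholders, and DefaultAzureCredential assumes the separate azure-identity package.

    # Minimal sketch of the API surface added/renamed in this diff.
    # All <angle-bracket> values are hypothetical placeholders.
    from azure.identity import DefaultAzureCredential  # assumes azure-identity is installed
    from azure.mgmt.datafactory import DataFactoryManagementClient
    from azure.mgmt.datafactory.models import (
        BlobEventsTrigger,
        BlobEventType,          # renamed from BlobEventTypes in this diff
        DataFlowDebugPackage,
        DataFlowDebugResource,
        DataFlowReference,
        DataFlowResource,
        DataFlowSink,
        DataFlowSource,
        Flowlet,                # new DataFlow subtype added by this diff
    )

    client = DataFactoryManagementClient(
        credential=DefaultAzureCredential(),
        subscription_id="<subscription-id>",
    )

    # Flowlet is a reusable data-flow fragment; script_lines is the new
    # line-oriented alternative to the single `script` string (also added
    # to MappingDataFlow in this diff).
    flowlet = Flowlet(
        description="Reusable transformation fragment",
        sources=[DataFlowSource(name="src")],
        sinks=[DataFlowSink(name="sink")],
        script_lines=[
            "source(output(id as string)) ~> src",
            "src sink() ~> sink",
        ],
    )
    client.data_flows.create_or_update(
        "<resource-group>", "<factory-name>", "MyFlowlet",
        DataFlowResource(properties=flowlet),
    )

    # Transformations (and DataFlowSource/DataFlowSink/PowerQuerySource/
    # PowerQuerySink) can now point at a flowlet via the new `flowlet` parameter.
    source = DataFlowSource(
        name="src",
        flowlet=DataFlowReference(reference_name="MyFlowlet"),
    )

    # The debug package now accepts a list of flows via the new `data_flows`
    # field, in addition to the single `data_flow`, so a flowlet can ride
    # along with the flow under debug.
    package = DataFlowDebugPackage(
        session_id="<session-id>",
        data_flows=[DataFlowDebugResource(name="MyFlowlet", properties=flowlet)],
    )

    # The enum rename is source-breaking for callers importing BlobEventTypes
    # by name, but wire-compatible: the string values are unchanged and the
    # enum metaclass remains case-insensitive.
    trigger = BlobEventsTrigger(
        events=[BlobEventType.MICROSOFT_STORAGE_BLOB_CREATED],
        scope="/subscriptions/<sub>/resourceGroups/<rg>/providers/Microsoft.Storage/storageAccounts/<account>",
    )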