From 7db825dd13765587a7e005057b7939a55e0dfbc7 Mon Sep 17 00:00:00 2001
From: Jingshu923 <52914166+Jingshu923@users.noreply.github.com>
Date: Mon, 13 Dec 2021 16:38:05 +0800
Subject: [PATCH] [DataFactory]Added new features into 5.0.0 (#25595)

* [DataFactory]Supported AmazonRdsForOracle Source
* [DataFactory]Added new features into 4.25.0
* [DataFactory]Added new features into 4.26.0
* [DataFactory]Supported Dataflow flowlets in ADF
* update for breaking changes
* [DataFactory]Added new features into 4.28.0
* add more files
* [DataFactory]Added new features into 4.29.0
* update version
---
 .../datafactory_resource-manager.txt           |  4 ++--
 .../src/CHANGELOG.md                           |  8 ++++++++
 .../src/Generated/Models/DataFlowReference.cs  | 10 +++++++++-
 .../src/Generated/Models/Flowlet.cs            | 18 +-----------------
 .../src/Generated/Models/FtpReadSettings.cs    | 14 +++++++++++++-
 .../src/Generated/Models/PipelineRun.cs        |  6 ++++--
 .../src/Generated/Models/SftpReadSettings.cs   | 14 +++++++++++++-
 ...crosoft.Azure.Management.DataFactory.csproj |  8 +++++---
 .../src/Properties/AssemblyInfo.cs             |  4 ++--
 .../tests/JsonSamples/PipelineJsonSamples.cs   |  3 ++-
 10 files changed, 59 insertions(+), 30 deletions(-)

diff --git a/eng/mgmt/mgmtmetadata/datafactory_resource-manager.txt b/eng/mgmt/mgmtmetadata/datafactory_resource-manager.txt
index 33bd1aca4b18..620a19559122 100644
--- a/eng/mgmt/mgmtmetadata/datafactory_resource-manager.txt
+++ b/eng/mgmt/mgmtmetadata/datafactory_resource-manager.txt
@@ -5,11 +5,11 @@ Generating CSharp code
 Executing AutoRest command
 cmd.exe /c autorest.cmd https://github.com/Azure/azure-rest-api-specs/blob/main/specification/datafactory/resource-manager/readme.md --csharp --version=v2 --reflect-api-versions --tag=package-2018-06 --csharp-sdks-folder=D:\Projects\azure-sdk-for-net\sdk
 Autorest CSharp Version: 2.3.82
-2021-11-11 01:15:27 UTC
+2021-12-09 02:19:59 UTC
 Azure-rest-api-specs repository information
 GitHub fork: Azure
 Branch: main
-Commit: 7fab18ac68c0720823cb5cd8f52faed16014f244
+Commit: 40cb73ed0c84fa515b911af5fdf77f68b764ea74
 AutoRest information
 Requested version: v2
 Bootstrapper version: autorest@2.0.4413
diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/CHANGELOG.md b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/CHANGELOG.md
index b9a94ab6d86f..d3b5d422ee80 100644
--- a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/CHANGELOG.md
+++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/CHANGELOG.md
@@ -1,5 +1,13 @@
 # Changelog for the Azure Data Factory V2 .NET SDK
 
+## Version 5.0.0
+### Feature Additions
+### Breaking Changes
+- Removed unnecessary additional properties from the data flow Flowlet model (bug fix)
+- Added the optional property disableChunking to FTP and SFTP read settings
+- Added parameters to the data flow activity's data flow reference
+- Added possible values to the pipeline run status description
+
 ## Version 4.28.0
 ### Feature Additions
 - Supported Dataset and LinkedService for Rest call transform
diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/DataFlowReference.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/DataFlowReference.cs
index da9861bce286..c46e78927edd 100644
--- a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/DataFlowReference.cs
+++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/DataFlowReference.cs
@@ -37,11 +37,13 @@ public DataFlowReference()
         /// message are deserialized this collection
         /// Reference data flow parameters from
         /// dataset.
-        public DataFlowReference(string referenceName, IDictionary<string, object> additionalProperties = default(IDictionary<string, object>), object datasetParameters = default(object))
+        /// Data flow parameters
+        public DataFlowReference(string referenceName, IDictionary<string, object> additionalProperties = default(IDictionary<string, object>), object datasetParameters = default(object), IDictionary<string, object> parameters = default(IDictionary<string, object>))
         {
             AdditionalProperties = additionalProperties;
             ReferenceName = referenceName;
             DatasetParameters = datasetParameters;
+            Parameters = parameters;
             CustomInit();
         }
 
@@ -76,6 +78,12 @@ static DataFlowReference()
         [JsonProperty(PropertyName = "datasetParameters")]
         public object DatasetParameters { get; set; }
 
+        ///
+        /// Gets or sets data flow parameters
+        ///
+        [JsonProperty(PropertyName = "parameters")]
+        public IDictionary<string, object> Parameters { get; set; }
+
         ///
         /// Data flow reference type.
         ///
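For illustration only, a minimal sketch of how the new Parameters dictionary on DataFlowReference might be used when wiring a data flow into a pipeline. This is not part of the patch; the data flow name, parameter name, and activity name below are made up.

// Hypothetical usage sketch (not from this patch): pass data flow parameter
// values through the new DataFlowReference.Parameters dictionary.
using System.Collections.Generic;
using Microsoft.Azure.Management.DataFactory.Models;

var dataFlowReference = new DataFlowReference
{
    ReferenceName = "MyFlowlet",                 // illustrative name
    Parameters = new Dictionary<string, object>
    {
        { "batchSize", 100 }                     // illustrative parameter
    }
};

var activity = new ExecuteDataFlowActivity
{
    Name = "RunMyDataFlow",                      // illustrative activity name
    DataFlow = dataFlowReference
};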
diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/Flowlet.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/Flowlet.cs
index a21237d14fae..3d72ef4e17f1 100644
--- a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/Flowlet.cs
+++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/Flowlet.cs
@@ -39,24 +39,20 @@ public Flowlet()
         /// describing the data flow.
         /// The folder that this data flow is in. If not
         /// specified, Data flow will appear at the root level.
-        /// Unmatched properties from the
-        /// message are deserialized this collection
         /// List of sources in Flowlet.
         /// List of sinks in Flowlet.
         /// List of transformations in
         /// Flowlet.
         /// Flowlet script.
         /// Flowlet script lines.
-        public Flowlet(string description = default(string), IList<object> annotations = default(IList<object>), DataFlowFolder folder = default(DataFlowFolder), IDictionary<string, object> additionalProperties = default(IDictionary<string, object>), IList<DataFlowSource> sources = default(IList<DataFlowSource>), IList<DataFlowSink> sinks = default(IList<DataFlowSink>), IList<Transformation> transformations = default(IList<Transformation>), string script = default(string), IList<object> scriptLines = default(IList<object>), object additionalProperties1 = default(object))
+        public Flowlet(string description = default(string), IList<object> annotations = default(IList<object>), DataFlowFolder folder = default(DataFlowFolder), IList<DataFlowSource> sources = default(IList<DataFlowSource>), IList<DataFlowSink> sinks = default(IList<DataFlowSink>), IList<Transformation> transformations = default(IList<Transformation>), string script = default(string), IList<object> scriptLines = default(IList<object>))
             : base(description, annotations, folder)
         {
-            AdditionalProperties = additionalProperties;
             Sources = sources;
             Sinks = sinks;
             Transformations = transformations;
             Script = script;
             ScriptLines = scriptLines;
-            AdditionalProperties1 = additionalProperties1;
             CustomInit();
         }
 
@@ -65,13 +61,6 @@ public Flowlet()
         ///
         partial void CustomInit();
 
-        ///
-        /// Gets or sets unmatched properties from the message are deserialized
-        /// this collection
-        ///
-        [JsonExtensionData]
-        public IDictionary<string, object> AdditionalProperties { get; set; }
-
         ///
         /// Gets or sets list of sources in Flowlet.
         ///
@@ -102,10 +91,5 @@ public Flowlet()
         [JsonProperty(PropertyName = "typeProperties.scriptLines")]
         public IList<object> ScriptLines { get; set; }
 
-        ///
-        ///
-        [JsonProperty(PropertyName = "typeProperties.additionalProperties")]
-        public object AdditionalProperties1 { get; set; }
-
     }
 }
diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/FtpReadSettings.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/FtpReadSettings.cs
index 3a53f03d86bb..968760377d1e 100644
--- a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/FtpReadSettings.cs
+++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/FtpReadSettings.cs
@@ -61,7 +61,10 @@ public FtpReadSettings()
         /// string).
         /// Specify whether to use binary
         /// transfer mode for FTP stores.
-        public FtpReadSettings(IDictionary<string, object> additionalProperties = default(IDictionary<string, object>), object maxConcurrentConnections = default(object), object disableMetricsCollection = default(object), object recursive = default(object), object wildcardFolderPath = default(object), object wildcardFileName = default(object), bool? enablePartitionDiscovery = default(bool?), object partitionRootPath = default(object), object deleteFilesAfterCompletion = default(object), object fileListPath = default(object), bool? useBinaryTransfer = default(bool?))
+        /// If true, disable parallel reading
+        /// within each file. Default is false. Type: boolean (or Expression
+        /// with resultType boolean).
+        public FtpReadSettings(IDictionary<string, object> additionalProperties = default(IDictionary<string, object>), object maxConcurrentConnections = default(object), object disableMetricsCollection = default(object), object recursive = default(object), object wildcardFolderPath = default(object), object wildcardFileName = default(object), bool? enablePartitionDiscovery = default(bool?), object partitionRootPath = default(object), object deleteFilesAfterCompletion = default(object), object fileListPath = default(object), bool? useBinaryTransfer = default(bool?), object disableChunking = default(object))
             : base(additionalProperties, maxConcurrentConnections, disableMetricsCollection)
         {
             Recursive = recursive;
@@ -72,6 +75,7 @@ public FtpReadSettings()
             DeleteFilesAfterCompletion = deleteFilesAfterCompletion;
             FileListPath = fileListPath;
             UseBinaryTransfer = useBinaryTransfer;
+            DisableChunking = disableChunking;
             CustomInit();
         }
 
@@ -138,5 +142,13 @@ public FtpReadSettings()
         [JsonProperty(PropertyName = "useBinaryTransfer")]
         public bool? UseBinaryTransfer { get; set; }
 
+        ///
+        /// Gets or sets if true, disable parallel reading within each file.
+        /// Default is false. Type: boolean (or Expression with resultType
+        /// boolean).
+        ///
+        [JsonProperty(PropertyName = "disableChunking")]
+        public object DisableChunking { get; set; }
+
     }
 }
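The same disableChunking flag is mirrored on SftpReadSettings further below. As a hedged illustration (not part of the patch), a copy-activity source might opt out of parallel intra-file reads like this; the wildcard pattern is made up.

// Hypothetical usage sketch (not from this patch): disable parallel reads
// within each file when copying delimited text from an FTP store.
using Microsoft.Azure.Management.DataFactory.Models;

var source = new DelimitedTextSource
{
    StoreSettings = new FtpReadSettings
    {
        Recursive = true,
        WildcardFileName = "*.csv",      // illustrative pattern
        UseBinaryTransfer = true,
        DisableChunking = true           // new in 5.0.0
    },
    FormatSettings = new DelimitedTextReadSettings()
};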
diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/PipelineRun.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/PipelineRun.cs
index f1d431a01efa..4a1eaa4c8bde 100644
--- a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/PipelineRun.cs
+++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/PipelineRun.cs
@@ -52,7 +52,8 @@ public PipelineRun()
         /// The end time of a pipeline run in ISO8601
         /// format.
         /// The duration of a pipeline run.
-        /// The status of a pipeline run.
+        /// The status of a pipeline run. Possible values:
+        /// Queued, InProgress, Succeeded, Failed, Canceling, Cancelled
         /// The message from a pipeline run.
         public PipelineRun(IDictionary<string, object> additionalProperties = default(IDictionary<string, object>), string runId = default(string), string runGroupId = default(string), bool? isLatest = default(bool?), string pipelineName = default(string), IDictionary<string, string> parameters = default(IDictionary<string, string>), IDictionary<string, string> runDimensions = default(IDictionary<string, string>), PipelineRunInvokedBy invokedBy = default(PipelineRunInvokedBy), System.DateTime? lastUpdated = default(System.DateTime?), System.DateTime? runStart = default(System.DateTime?), System.DateTime? runEnd = default(System.DateTime?), int? durationInMs = default(int?), string status = default(string), string message = default(string))
         {
@@ -156,7 +157,8 @@ public PipelineRun()
         public int? DurationInMs { get; private set; }
 
         ///
-        /// Gets the status of a pipeline run.
+        /// Gets the status of a pipeline run. Possible values: Queued,
+        /// InProgress, Succeeded, Failed, Canceling, Cancelled
         ///
         [JsonProperty(PropertyName = "status")]
         public string Status { get; private set; }
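Because the status values are now documented on the model, a caller can poll a run until it leaves the Queued and InProgress states. A minimal sketch follows (not part of the patch); the management client, resource group, factory name, and run id are assumed to already exist.

// Hypothetical usage sketch (not from this patch): poll a pipeline run until
// it reaches a terminal status, using the documented status values.
// Assumes an authenticated DataFactoryManagementClient and existing
// resourceGroupName, factoryName, and runId variables.
using System;
using System.Threading;
using Microsoft.Azure.Management.DataFactory;
using Microsoft.Azure.Management.DataFactory.Models;

PipelineRun run = client.PipelineRuns.Get(resourceGroupName, factoryName, runId);
while (run.Status == "Queued" || run.Status == "InProgress")
{
    Thread.Sleep(TimeSpan.FromSeconds(15));
    run = client.PipelineRuns.Get(resourceGroupName, factoryName, runId);
}
Console.WriteLine($"Run {run.RunId} ended with status '{run.Status}': {run.Message}");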
diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/SftpReadSettings.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/SftpReadSettings.cs
index ef26ea2cd27c..a677c1cc0da9 100644
--- a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/SftpReadSettings.cs
+++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/SftpReadSettings.cs
@@ -65,7 +65,10 @@ public SftpReadSettings()
         /// The end of file's modified
         /// datetime. Type: string (or Expression with resultType
         /// string).
-        public SftpReadSettings(IDictionary<string, object> additionalProperties = default(IDictionary<string, object>), object maxConcurrentConnections = default(object), object disableMetricsCollection = default(object), object recursive = default(object), object wildcardFolderPath = default(object), object wildcardFileName = default(object), bool? enablePartitionDiscovery = default(bool?), object partitionRootPath = default(object), object fileListPath = default(object), object deleteFilesAfterCompletion = default(object), object modifiedDatetimeStart = default(object), object modifiedDatetimeEnd = default(object))
+        /// If true, disable parallel reading
+        /// within each file. Default is false. Type: boolean (or Expression
+        /// with resultType boolean).
+        public SftpReadSettings(IDictionary<string, object> additionalProperties = default(IDictionary<string, object>), object maxConcurrentConnections = default(object), object disableMetricsCollection = default(object), object recursive = default(object), object wildcardFolderPath = default(object), object wildcardFileName = default(object), bool? enablePartitionDiscovery = default(bool?), object partitionRootPath = default(object), object fileListPath = default(object), object deleteFilesAfterCompletion = default(object), object modifiedDatetimeStart = default(object), object modifiedDatetimeEnd = default(object), object disableChunking = default(object))
             : base(additionalProperties, maxConcurrentConnections, disableMetricsCollection)
         {
             Recursive = recursive;
@@ -77,6 +80,7 @@ public SftpReadSettings()
             DeleteFilesAfterCompletion = deleteFilesAfterCompletion;
             ModifiedDatetimeStart = modifiedDatetimeStart;
             ModifiedDatetimeEnd = modifiedDatetimeEnd;
+            DisableChunking = disableChunking;
             CustomInit();
         }
 
@@ -150,5 +154,13 @@ public SftpReadSettings()
         [JsonProperty(PropertyName = "modifiedDatetimeEnd")]
         public object ModifiedDatetimeEnd { get; set; }
 
+        ///
+        /// Gets or sets if true, disable parallel reading within each file.
+        /// Default is false. Type: boolean (or Expression with resultType
+        /// boolean).
+        ///
+        [JsonProperty(PropertyName = "disableChunking")]
+        public object DisableChunking { get; set; }
+
     }
 }
diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Microsoft.Azure.Management.DataFactory.csproj b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Microsoft.Azure.Management.DataFactory.csproj
index a285c1b3d814..f0dc89dee1a6 100644
--- a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Microsoft.Azure.Management.DataFactory.csproj
+++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Microsoft.Azure.Management.DataFactory.csproj
@@ -5,13 +5,15 @@ Microsoft.Azure.Management.DataFactory
     <Description>Azure Data Factory V2 is the data integration platform that goes beyond Azure Data Factory V1's orchestration and batch-processing of time-series data, with a general purpose app model supporting modern data warehousing patterns and scenarios, lift-and-shift SSIS, and data-driven SaaS applications. Compose and manage reliable and secure data integration workflows at scale. Use native ADF data connectors and Integration Runtimes to move and transform cloud and on-premises data that can be unstructured, semi-structured, and structured with Hadoop, Azure Data Lake, Spark, SQL Server, Cosmos DB and many other data platforms.</Description>
-    <Version>4.28.0</Version>
+    <Version>5.0.0</Version>
     <AssemblyName>Microsoft.Azure.Management.DataFactory</AssemblyName>
     <PackageTags>Microsoft Azure resource management;Data Factory;ADF;</PackageTags>
diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Properties/AssemblyInfo.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Properties/AssemblyInfo.cs
index 2cb08cf4d7f8..a61b4f110ab7 100644
--- a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Properties/AssemblyInfo.cs
+++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Properties/AssemblyInfo.cs
@@ -6,8 +6,8 @@
 [assembly: AssemblyTitle("Microsoft Azure Data Factory Management Library")]
 [assembly: AssemblyDescription("Provides management functionality for Microsoft Azure Data Factory Resources.")]
-[assembly: AssemblyVersion("4.28.0.0")]
-[assembly: AssemblyFileVersion("4.28.0.0")]
+[assembly: AssemblyVersion("5.0.0.0")]
+[assembly: AssemblyFileVersion("5.0.0.0")]
 [assembly: AssemblyConfiguration("")]
 [assembly: AssemblyCompany("Microsoft")]
 [assembly: AssemblyProduct("Microsoft Azure .NET SDK")]
diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/tests/JsonSamples/PipelineJsonSamples.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/tests/JsonSamples/PipelineJsonSamples.cs
index 6ea70750358e..408e10c307b6 100644
--- a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/tests/JsonSamples/PipelineJsonSamples.cs
+++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/tests/JsonSamples/PipelineJsonSamples.cs
@@ -5042,7 +5042,8 @@ public class PipelineJsonSamples : JsonSampleCollection<PipelineJsonSamples>
                     ""recursive"": true,
                     ""wildcardFolderPath"": ""A*"",
                     ""wildcardFileName"": ""*.csv"",
-                    ""useBinaryTransfer"": true
+                    ""useBinaryTransfer"": true,
+                    ""disableChunking"": true
                 },
                 ""formatSettings"": {
                     ""type"": ""DelimitedTextReadSettings"",