diff --git a/src/datafactory/HISTORY.rst b/src/datafactory/HISTORY.rst index 3566e61f36a..1c139576ba0 100644 --- a/src/datafactory/HISTORY.rst +++ b/src/datafactory/HISTORY.rst @@ -3,10 +3,6 @@ Release History =============== -0.2.0 -++++++ -* add update command for linked services and triggers and datasets - 0.1.0 ++++++ * Initial release. diff --git a/src/datafactory/azext_datafactory/generated/_help.py b/src/datafactory/azext_datafactory/generated/_help.py index f42e472af34..7e562518b07 100644 --- a/src/datafactory/azext_datafactory/generated/_help.py +++ b/src/datafactory/azext_datafactory/generated/_help.py @@ -19,11 +19,14 @@ helps['datafactory factory list'] = """ type: command - short-summary: "Lists factories under the specified subscription." + short-summary: "Lists factories under the specified subscription or resource group." examples: - name: Factories_ListByResourceGroup text: |- az datafactory factory list --resource-group "exampleResourceGroup" + - name: Factories_List + text: |- + az datafactory factory list """ helps['datafactory factory show'] = """ @@ -37,7 +40,7 @@ helps['datafactory factory create'] = """ type: command - short-summary: "Creates or updates a factory." + short-summary: "Create a factory." parameters: - name: --factory-vsts-configuration short-summary: "Factory's VSTS repo information." @@ -203,7 +206,7 @@ helps['datafactory integration-runtime managed create'] = """ type: command - short-summary: "Creates or updates an integration runtime." + short-summary: "Create an integration runtime." """ helps['datafactory integration-runtime self-hosted'] = """ @@ -213,7 +216,7 @@ helps['datafactory integration-runtime self-hosted create'] = """ type: command - short-summary: "Creates or updates an integration runtime." + short-summary: "Create an integration runtime." examples: - name: IntegrationRuntimes_Create text: |- @@ -432,7 +435,7 @@ helps['datafactory linked-service create'] = """ type: command - short-summary: "Creates or updates a linked service." + short-summary: "Create a linked service." examples: - name: LinkedServices_Create text: |- @@ -444,7 +447,7 @@ helps['datafactory linked-service update'] = """ type: command - short-summary: "Creates or updates a linked service." + short-summary: "Update a linked service." examples: - name: LinkedServices_Update text: |- @@ -488,7 +491,7 @@ helps['datafactory dataset create'] = """ type: command - short-summary: "Creates or updates a dataset." + short-summary: "Create a dataset." examples: - name: Datasets_Create text: |- @@ -502,7 +505,7 @@ helps['datafactory dataset update'] = """ type: command - short-summary: "Creates or updates a dataset." + short-summary: "Update a dataset." parameters: - name: --folder short-summary: "The folder that this Dataset is in. If not specified, Dataset will appear at the root level." @@ -556,7 +559,7 @@ helps['datafactory pipeline create'] = """ type: command - short-summary: "Creates or updates a pipeline." + short-summary: "Create a pipeline." examples: - name: Pipelines_Create text: |- @@ -576,7 +579,7 @@ helps['datafactory pipeline update'] = """ type: command - short-summary: "Creates or updates a pipeline." + short-summary: "Update a pipeline." examples: - name: Pipelines_Update text: |- @@ -739,7 +742,7 @@ helps['datafactory trigger create'] = """ type: command - short-summary: "Creates or updates a trigger." + short-summary: "Create a trigger." 
examples: - name: Triggers_Create text: |- @@ -753,7 +756,7 @@ helps['datafactory trigger update'] = """ type: command - short-summary: "Creates or updates a trigger." + short-summary: "Update a trigger." examples: - name: Triggers_Update text: |- diff --git a/src/datafactory/azext_datafactory/generated/_params.py b/src/datafactory/azext_datafactory/generated/_params.py index 85dcca4f8b3..19e001c560f 100644 --- a/src/datafactory/azext_datafactory/generated/_params.py +++ b/src/datafactory/azext_datafactory/generated/_params.py @@ -57,6 +57,8 @@ def load_arguments(self, _): 'GitHub repo information.', arg_group='RepoConfiguration') c.argument('global_parameters', type=validate_file_or_dict, help='List of parameters for factory. Expected ' 'value: json-string/@json-file.') + c.argument('public_network_access', arg_type=get_enum_type(['Enabled', 'Disabled']), help='Whether or not ' + 'public network access is allowed for the data factory.') with self.argument_context('datafactory factory update') as c: c.argument('resource_group_name', resource_group_name_type) diff --git a/src/datafactory/azext_datafactory/generated/custom.py b/src/datafactory/azext_datafactory/generated/custom.py index 513e21ca96d..de15a604498 100644 --- a/src/datafactory/azext_datafactory/generated/custom.py +++ b/src/datafactory/azext_datafactory/generated/custom.py @@ -39,7 +39,8 @@ def datafactory_factory_create(client, tags=None, factory_vsts_configuration=None, factory_git_hub_configuration=None, - global_parameters=None): + global_parameters=None, + public_network_access=None): all_repo_configuration = [] if factory_vsts_configuration is not None: all_repo_configuration.append(factory_vsts_configuration) @@ -56,7 +57,8 @@ def datafactory_factory_create(client, tags=tags, identity=None, repo_configuration=repo_configuration, - global_parameters=global_parameters) + global_parameters=global_parameters, + public_network_access=public_network_access) def datafactory_factory_update(client, @@ -66,7 +68,7 @@ def datafactory_factory_update(client, return client.update(resource_group_name=resource_group_name, factory_name=factory_name, tags=tags, - identity={"type": "SystemAssigned"}) + identity=json.loads("{\"type\": \"SystemAssigned\"}")) def datafactory_factory_delete(client, diff --git a/src/datafactory/azext_datafactory/tests/latest/test_datafactory_scenario.py b/src/datafactory/azext_datafactory/tests/latest/test_datafactory_scenario.py index bdc9fd88e23..f3cef88ea30 100644 --- a/src/datafactory/azext_datafactory/tests/latest/test_datafactory_scenario.py +++ b/src/datafactory/azext_datafactory/tests/latest/test_datafactory_scenario.py @@ -17,6 +17,7 @@ TEST_DIR = os.path.abspath(os.path.join(os.path.abspath(__file__), '..')) +# Env setup @try_manual def setup(test, rg): pass @@ -671,11 +672,13 @@ def step_factories_delete(test, rg): checks=[]) +# Env cleanup @try_manual def cleanup(test, rg): pass +# Testcase @try_manual def call_scenario(test, rg): setup(test, rg) diff --git a/src/datafactory/azext_datafactory/tests/latest/test_datafactory_scenario_coverage.md b/src/datafactory/azext_datafactory/tests/latest/test_datafactory_scenario_coverage.md deleted file mode 100644 index 9f0a5f555cf..00000000000 --- a/src/datafactory/azext_datafactory/tests/latest/test_datafactory_scenario_coverage.md +++ /dev/null @@ -1,56 +0,0 @@ -|Scenario|Result|ErrorMessage|ErrorStack|ErrorNormalized|StartDt|EndDt| -|step_factories_createorupdate|successed||||2020-09-09 03:20:36.459026|2020-09-09 03:20:47.009536| 
-|step_factories_update|successed||||2020-09-09 03:13:09.171999|2020-09-09 03:13:17.158415| -|step_linkedservices_create|successed||||2020-09-09 03:13:17.158521|2020-09-09 03:13:18.391822| -|step_linkedservices_update|successed||||2020-09-09 03:13:18.391936|2020-09-09 03:13:20.086697| -|step_datasets_create|successed||||2020-09-09 03:13:20.086867|2020-09-09 03:13:21.030817| -|step_datasets_update|successed||||2020-09-09 03:13:21.030945|2020-09-09 03:13:23.212312| -|step_pipelines_create|successed||||2020-09-09 03:13:23.212418|2020-09-09 03:13:24.144046| -|step_pipelines_update|successed||||2020-09-09 03:13:24.144222|2020-09-09 03:13:25.633789| -|step_triggers_create|successed||||2020-09-09 03:13:25.633898|2020-09-09 03:13:26.656790| -|step_triggers_update|successed||||2020-09-09 03:13:26.656960|2020-09-09 03:13:28.908454| -|step_integrationruntimes_create|successed||||2020-09-09 03:13:28.908624|2020-09-09 03:13:30.048626| -|step_integrationruntimes_update|successed||||2020-09-09 03:13:30.048749|2020-09-09 03:13:30.718810| -|step_pipelines_createrun|successed||||2020-09-09 03:20:58.002857|2020-09-09 03:20:59.633723| -|step_integrationruntimes_get|successed||||2020-09-09 03:14:47.710770|2020-09-09 03:14:48.302708| -|step_reruntriggers_listbytrigger|successed||||2020-09-09 03:13:55.851345|2020-09-09 03:13:55.851351| -|step_linkedservices_get|successed||||2020-09-09 03:13:55.851669|2020-09-09 03:13:56.412982| -|step_pipelines_get|successed||||2020-09-09 03:13:56.413198|2020-09-09 03:13:57.000576| -|step_datasets_get|successed||||2020-09-09 03:13:57.000711|2020-09-09 03:13:57.627004| -|step_triggers_get|successed||||2020-09-09 03:21:05.240434|2020-09-09 03:21:05.843100| -|step_integrationruntimes_listbyfactory|successed||||2020-09-09 03:13:58.229772|2020-09-09 03:13:58.825227| -|step_linkedservices_listbyfactory|successed||||2020-09-09 03:13:58.825356|2020-09-09 03:13:59.474421| -|step_pipelines_listbyfactory|successed||||2020-09-09 03:13:59.474573|2020-09-09 03:14:00.075676| -|step_triggers_listbyfactory|successed||||2020-09-09 03:14:00.075778|2020-09-09 03:14:00.659819| -|step_datasets_listbyfactory|successed||||2020-09-09 03:14:00.659993|2020-09-09 03:14:01.933289| -|step_factories_get|successed||||2020-09-09 03:14:01.933391|2020-09-09 03:14:02.496092| -|step_factories_listbyresourcegroup|successed||||2020-09-09 03:14:02.496256|2020-09-09 03:14:03.065882| -|step_factories_list|successed||||2020-09-09 03:14:03.066110|2020-09-09 03:14:03.652226| -|step_operations_list|successed||||2020-09-09 03:14:03.652474|2020-09-09 03:14:03.652478| -|step_integrationruntimes_regenerateauthkey|successed||||2020-09-09 03:14:03.652652|2020-09-09 03:14:04.485017| -|step_integrationruntimes_synccredentials|successed||||2020-09-09 03:14:04.485147|2020-09-09 03:14:05.267252| -|step_integrationruntimes_getmonitoringdata|successed||||2020-09-09 03:14:05.267586|2020-09-09 03:14:05.885271| -|step_integrationruntimes_listauthkeys|successed||||2020-09-09 03:14:05.885388|2020-09-09 03:14:06.561146| -|step_integrationruntimes_upgrade|successed||||2020-09-09 03:14:06.561314|2020-09-09 03:14:07.329602| -|step_integrationruntimes_getstatus|successed||||2020-09-09 03:14:07.329744|2020-09-09 03:14:07.953859| -|step_triggers_geteventsubscriptionstatus|successed||||2020-09-09 03:14:07.953969|2020-09-09 03:14:08.601782| -|step_triggers_unsubscribefromevents|successed||||2020-09-09 03:14:08.601956|2020-09-09 03:14:09.397017| -|step_triggers_subscribetoevents|successed||||2020-09-09 03:14:09.397108|2020-09-09 03:14:10.182324| 
-|step_triggers_start|successed||||2020-09-09 03:21:02.232388|2020-09-09 03:21:05.240220| -|step_triggers_stop|successed||||2020-09-09 03:26:10.167791|2020-09-09 03:26:12.927439| -|step_factories_getdataplaneaccess|successed||||2020-09-09 03:14:15.489956|2020-09-09 03:14:16.213205| -|step_triggerruns_querybyfactory|successed||||2020-09-09 03:26:09.523472|2020-09-09 03:26:10.167619| -|step_factories_configurefactoryrepo|successed||||2020-09-09 03:14:16.845877|2020-09-09 03:14:19.093687| -|step_integrationruntimes_delete|successed||||2020-09-09 03:20:29.384647|2020-09-09 03:20:31.116770| -|step_triggers_delete|successed||||2020-09-09 03:26:12.927659|2020-09-09 03:26:17.287524| -|step_pipelines_delete|successed||||2020-09-09 03:26:17.287730|2020-09-09 03:26:21.728041| -|step_datasets_delete|successed||||2020-09-09 03:14:27.023092|2020-09-09 03:14:28.558416| -|step_linkedservices_delete|successed||||2020-09-09 03:14:28.558520|2020-09-09 03:14:29.608796| -|step_factories_delete|successed||||2020-09-09 03:26:21.728199|2020-09-09 03:26:28.588917| -|step_integrationruntimes_start|successed||||2020-09-09 03:14:48.302813|2020-09-09 03:16:06.820258| -|step_integrationruntimes_stop|successed||||2020-09-09 03:16:06.820543|2020-09-09 03:20:29.384268| -|step_pipelineruns_get|successed||||2020-09-09 03:20:55.257577|2020-09-09 03:20:56.603108| -|step_activityruns_querybypipelinerun|successed||||2020-09-09 03:20:56.603386|2020-09-09 03:20:58.002421| -|step_pipelineruns_cancel|successed||||2020-09-09 03:20:59.634195|2020-09-09 03:21:00.580469| -|step_triggerruns_rerun|successed||||2020-09-09 03:26:07.876793|2020-09-09 03:26:09.523263| -Coverage: 54/54 diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_configuration.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_configuration.py index 84eac8676c6..3e3cbab9738 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_configuration.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_configuration.py @@ -48,8 +48,7 @@ def __init__( self.credential = credential self.subscription_id = subscription_id self.api_version = "2018-06-01" - self.credential_scopes = ['https://management.azure.com/.default'] - self.credential_scopes.extend(kwargs.pop('credential_scopes', [])) + self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default']) kwargs.setdefault('sdk_moniker', 'datafactorymanagementclient/{}'.format(VERSION)) self._configure(**kwargs) diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/__init__.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/__init__.py index 5cde5bc8d05..571673cab5c 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/__init__.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/__init__.py @@ -6,5 +6,5 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from ._data_factory_management_client_async import DataFactoryManagementClient +from ._data_factory_management_client import DataFactoryManagementClient __all__ = ['DataFactoryManagementClient'] diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_configuration_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_configuration.py similarity index 95% rename from src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_configuration_async.py rename to src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_configuration.py index 411d6c4a66e..c88a091bdb9 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_configuration_async.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_configuration.py @@ -45,8 +45,7 @@ def __init__( self.credential = credential self.subscription_id = subscription_id self.api_version = "2018-06-01" - self.credential_scopes = ['https://management.azure.com/.default'] - self.credential_scopes.extend(kwargs.pop('credential_scopes', [])) + self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default']) kwargs.setdefault('sdk_moniker', 'datafactorymanagementclient/{}'.format(VERSION)) self._configure(**kwargs) diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_data_factory_management_client_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_data_factory_management_client.py similarity index 79% rename from src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_data_factory_management_client_async.py rename to src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_data_factory_management_client.py index b2b322686b8..0af06cc2e8a 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_data_factory_management_client_async.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_data_factory_management_client.py @@ -15,24 +15,24 @@ # pylint: disable=unused-import,ungrouped-imports from azure.core.credentials_async import AsyncTokenCredential -from ._configuration_async import DataFactoryManagementClientConfiguration -from .operations_async import OperationOperations -from .operations_async import FactoryOperations -from .operations_async import ExposureControlOperations -from .operations_async import IntegrationRuntimeOperations -from .operations_async import IntegrationRuntimeObjectMetadataOperations -from .operations_async import IntegrationRuntimeNodeOperations -from .operations_async import LinkedServiceOperations -from .operations_async import DatasetOperations -from .operations_async import PipelineOperations -from .operations_async import PipelineRunOperations -from .operations_async import ActivityRunOperations -from .operations_async import TriggerOperations -from .operations_async import TriggerRunOperations -from .operations_async import DataFlowOperations -from .operations_async import DataFlowDebugSessionOperations -from .operations_async import ManagedVirtualNetworkOperations -from .operations_async import ManagedPrivateEndpointOperations +from ._configuration import DataFactoryManagementClientConfiguration +from .operations import OperationOperations +from .operations import FactoryOperations +from .operations import ExposureControlOperations +from .operations import IntegrationRuntimeOperations +from .operations import 
IntegrationRuntimeObjectMetadataOperations +from .operations import IntegrationRuntimeNodeOperations +from .operations import LinkedServiceOperations +from .operations import DatasetOperations +from .operations import PipelineOperations +from .operations import PipelineRunOperations +from .operations import ActivityRunOperations +from .operations import TriggerOperations +from .operations import TriggerRunOperations +from .operations import DataFlowOperations +from .operations import DataFlowDebugSessionOperations +from .operations import ManagedVirtualNetworkOperations +from .operations import ManagedPrivateEndpointOperations from .. import models @@ -40,39 +40,39 @@ class DataFactoryManagementClient(object): """The Azure Data Factory V2 management API provides a RESTful set of web services that interact with Azure Data Factory V2 services. :ivar operation: OperationOperations operations - :vartype operation: data_factory_management_client.aio.operations_async.OperationOperations + :vartype operation: data_factory_management_client.aio.operations.OperationOperations :ivar factory: FactoryOperations operations - :vartype factory: data_factory_management_client.aio.operations_async.FactoryOperations + :vartype factory: data_factory_management_client.aio.operations.FactoryOperations :ivar exposure_control: ExposureControlOperations operations - :vartype exposure_control: data_factory_management_client.aio.operations_async.ExposureControlOperations + :vartype exposure_control: data_factory_management_client.aio.operations.ExposureControlOperations :ivar integration_runtime: IntegrationRuntimeOperations operations - :vartype integration_runtime: data_factory_management_client.aio.operations_async.IntegrationRuntimeOperations + :vartype integration_runtime: data_factory_management_client.aio.operations.IntegrationRuntimeOperations :ivar integration_runtime_object_metadata: IntegrationRuntimeObjectMetadataOperations operations - :vartype integration_runtime_object_metadata: data_factory_management_client.aio.operations_async.IntegrationRuntimeObjectMetadataOperations + :vartype integration_runtime_object_metadata: data_factory_management_client.aio.operations.IntegrationRuntimeObjectMetadataOperations :ivar integration_runtime_node: IntegrationRuntimeNodeOperations operations - :vartype integration_runtime_node: data_factory_management_client.aio.operations_async.IntegrationRuntimeNodeOperations + :vartype integration_runtime_node: data_factory_management_client.aio.operations.IntegrationRuntimeNodeOperations :ivar linked_service: LinkedServiceOperations operations - :vartype linked_service: data_factory_management_client.aio.operations_async.LinkedServiceOperations + :vartype linked_service: data_factory_management_client.aio.operations.LinkedServiceOperations :ivar dataset: DatasetOperations operations - :vartype dataset: data_factory_management_client.aio.operations_async.DatasetOperations + :vartype dataset: data_factory_management_client.aio.operations.DatasetOperations :ivar pipeline: PipelineOperations operations - :vartype pipeline: data_factory_management_client.aio.operations_async.PipelineOperations + :vartype pipeline: data_factory_management_client.aio.operations.PipelineOperations :ivar pipeline_run: PipelineRunOperations operations - :vartype pipeline_run: data_factory_management_client.aio.operations_async.PipelineRunOperations + :vartype pipeline_run: data_factory_management_client.aio.operations.PipelineRunOperations :ivar activity_run: ActivityRunOperations operations - 
:vartype activity_run: data_factory_management_client.aio.operations_async.ActivityRunOperations + :vartype activity_run: data_factory_management_client.aio.operations.ActivityRunOperations :ivar trigger: TriggerOperations operations - :vartype trigger: data_factory_management_client.aio.operations_async.TriggerOperations + :vartype trigger: data_factory_management_client.aio.operations.TriggerOperations :ivar trigger_run: TriggerRunOperations operations - :vartype trigger_run: data_factory_management_client.aio.operations_async.TriggerRunOperations + :vartype trigger_run: data_factory_management_client.aio.operations.TriggerRunOperations :ivar data_flow: DataFlowOperations operations - :vartype data_flow: data_factory_management_client.aio.operations_async.DataFlowOperations + :vartype data_flow: data_factory_management_client.aio.operations.DataFlowOperations :ivar data_flow_debug_session: DataFlowDebugSessionOperations operations - :vartype data_flow_debug_session: data_factory_management_client.aio.operations_async.DataFlowDebugSessionOperations + :vartype data_flow_debug_session: data_factory_management_client.aio.operations.DataFlowDebugSessionOperations :ivar managed_virtual_network: ManagedVirtualNetworkOperations operations - :vartype managed_virtual_network: data_factory_management_client.aio.operations_async.ManagedVirtualNetworkOperations + :vartype managed_virtual_network: data_factory_management_client.aio.operations.ManagedVirtualNetworkOperations :ivar managed_private_endpoint: ManagedPrivateEndpointOperations operations - :vartype managed_private_endpoint: data_factory_management_client.aio.operations_async.ManagedPrivateEndpointOperations + :vartype managed_private_endpoint: data_factory_management_client.aio.operations.ManagedPrivateEndpointOperations :param credential: Credential needed for the client to connect to Azure. :type credential: ~azure.core.credentials_async.AsyncTokenCredential :param subscription_id: The subscription identifier. diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/__init__.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/__init__.py new file mode 100644 index 00000000000..3f6a32ff284 --- /dev/null +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/__init__.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from ._operation_operations import OperationOperations +from ._factory_operations import FactoryOperations +from ._exposure_control_operations import ExposureControlOperations +from ._integration_runtime_operations import IntegrationRuntimeOperations +from ._integration_runtime_object_metadata_operations import IntegrationRuntimeObjectMetadataOperations +from ._integration_runtime_node_operations import IntegrationRuntimeNodeOperations +from ._linked_service_operations import LinkedServiceOperations +from ._dataset_operations import DatasetOperations +from ._pipeline_operations import PipelineOperations +from ._pipeline_run_operations import PipelineRunOperations +from ._activity_run_operations import ActivityRunOperations +from ._trigger_operations import TriggerOperations +from ._trigger_run_operations import TriggerRunOperations +from ._data_flow_operations import DataFlowOperations +from ._data_flow_debug_session_operations import DataFlowDebugSessionOperations +from ._managed_virtual_network_operations import ManagedVirtualNetworkOperations +from ._managed_private_endpoint_operations import ManagedPrivateEndpointOperations + +__all__ = [ + 'OperationOperations', + 'FactoryOperations', + 'ExposureControlOperations', + 'IntegrationRuntimeOperations', + 'IntegrationRuntimeObjectMetadataOperations', + 'IntegrationRuntimeNodeOperations', + 'LinkedServiceOperations', + 'DatasetOperations', + 'PipelineOperations', + 'PipelineRunOperations', + 'ActivityRunOperations', + 'TriggerOperations', + 'TriggerRunOperations', + 'DataFlowOperations', + 'DataFlowDebugSessionOperations', + 'ManagedVirtualNetworkOperations', + 'ManagedPrivateEndpointOperations', +] diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_activity_run_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_activity_run_operations.py similarity index 94% rename from src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_activity_run_operations_async.py rename to src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_activity_run_operations.py index 0d2e56be08b..4c839ebf1a1 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_activity_run_operations_async.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_activity_run_operations.py @@ -9,7 +9,7 @@ from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar import warnings -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest from azure.mgmt.core.exceptions import ARMErrorFormat @@ -80,12 +80,15 @@ async def query_by_pipeline_run( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.ActivityRunsQueryResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) filter_parameters = models.RunFilterParameters(continuation_token=continuation_token_parameter, 
last_updated_after=last_updated_after, last_updated_before=last_updated_before, filters=filters, order_by=order_by) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.query_by_pipeline_run.metadata['url'] # type: ignore @@ -104,13 +107,12 @@ async def query_by_pipeline_run( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(filter_parameters, 'RunFilterParameters') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_data_flow_debug_session_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_data_flow_debug_session_operations.py similarity index 91% rename from src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_data_flow_debug_session_operations_async.py rename to src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_data_flow_debug_session_operations.py index f1bf8ee8f73..8f649e05a97 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_data_flow_debug_session_operations_async.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_data_flow_debug_session_operations.py @@ -9,7 +9,7 @@ import warnings from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod @@ -55,12 +55,15 @@ async def _create_initial( **kwargs ) -> Optional["models.CreateDataFlowDebugSessionResponse"]: cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.CreateDataFlowDebugSessionResponse"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) request = models.CreateDataFlowDebugSessionRequest(compute_type=compute_type, core_count=core_count, time_to_live=time_to_live, name=name, properties=properties) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self._create_initial.metadata['url'] # type: ignore @@ -78,13 +81,12 @@ async def _create_initial( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 
'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(request, 'CreateDataFlowDebugSessionRequest') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -175,7 +177,13 @@ def get_long_running_output(pipeline_response): return cls(pipeline_response, deserialized, {}) return deserialized - if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + + if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = AsyncNoPolling() else: polling_method = polling if cont_token: @@ -207,14 +215,17 @@ def query_by_factory( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.QueryDataFlowDebugSessionsResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL @@ -309,12 +320,15 @@ async def add_data_flow( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.AddDataFlowToDebugSessionResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) request = models.DataFlowDebugPackage(session_id=session_id, datasets=datasets, linked_services=linked_services, source_settings=source_settings, parameters_debug_settings_parameters=parameters, dataset_parameters=dataset_parameters, folder_path=folder_path, reference_name=reference_name, name=name, properties=properties) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.add_data_flow.metadata['url'] # type: ignore @@ -332,13 +346,12 @@ async def add_data_flow( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(request, 'DataFlowDebugPackage') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - 
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -375,12 +388,15 @@ async def delete( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) request = models.DeleteDataFlowDebugSessionRequest(session_id=session_id) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.delete.metadata['url'] # type: ignore @@ -398,12 +414,12 @@ async def delete( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(request, 'DeleteDataFlowDebugSessionRequest') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -426,12 +442,15 @@ async def _execute_command_initial( **kwargs ) -> Optional["models.DataFlowDebugCommandResponse"]: cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.DataFlowDebugCommandResponse"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) request = models.DataFlowDebugCommandRequest(session_id=session_id, command=command, command_payload=command_payload) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self._execute_command_initial.metadata['url'] # type: ignore @@ -449,13 +468,12 @@ async def _execute_command_initial( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(request, 'DataFlowDebugCommandRequest') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -536,7 +554,13 @@ def get_long_running_output(pipeline_response): return cls(pipeline_response, deserialized, {}) return deserialized - if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, 
pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + + if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = AsyncNoPolling() else: polling_method = polling if cont_token: diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_data_flow_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_data_flow_operations.py similarity index 92% rename from src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_data_flow_operations_async.py rename to src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_data_flow_operations.py index b5c2e5656ce..cfcf1a8a5d9 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_data_flow_operations_async.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_data_flow_operations.py @@ -9,7 +9,7 @@ import warnings from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest from azure.mgmt.core.exceptions import ARMErrorFormat @@ -69,12 +69,15 @@ async def create_or_update( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.DataFlowResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) data_flow = models.DataFlowResource(properties=properties) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.create_or_update.metadata['url'] # type: ignore @@ -95,13 +98,12 @@ async def create_or_update( if if_match is not None: header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(data_flow, 'DataFlowResource') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -142,9 +144,12 @@ async def get( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.DataFlowResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.get.metadata['url'] # type: ignore @@ -164,7 +169,7 @@ async def get( header_parameters = {} # type: Dict[str, Any] if if_none_match 
is not None: header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -203,9 +208,12 @@ async def delete( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.delete.metadata['url'] # type: ignore @@ -223,6 +231,7 @@ async def delete( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -255,14 +264,17 @@ def list_by_factory( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.DataFlowListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_dataset_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_dataset_operations.py similarity index 92% rename from src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_dataset_operations_async.py rename to src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_dataset_operations.py index a8be0369365..68a7ca628d4 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_dataset_operations_async.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_dataset_operations.py @@ -9,7 +9,7 @@ import warnings from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest from azure.mgmt.core.exceptions import ARMErrorFormat @@ -59,14 +59,17 @@ def list_by_factory( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.DatasetListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } 
error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL @@ -140,12 +143,15 @@ async def create_or_update( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.DatasetResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) dataset = models.DatasetResource(properties=properties) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.create_or_update.metadata['url'] # type: ignore @@ -166,13 +172,12 @@ async def create_or_update( if if_match is not None: header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(dataset, 'DatasetResource') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -213,9 +218,12 @@ async def get( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.DatasetResource"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.get.metadata['url'] # type: ignore @@ -235,7 +243,7 @@ async def get( header_parameters = {} # type: Dict[str, Any] if if_none_match is not None: header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -276,9 +284,12 @@ async def delete( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.delete.metadata['url'] # type: ignore @@ -296,6 +307,7 @@ async def delete( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) 
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_exposure_control_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_exposure_control_operations.py similarity index 92% rename from src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_exposure_control_operations_async.py rename to src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_exposure_control_operations.py index b20acb1c3c8..31ca6ef5886 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_exposure_control_operations_async.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_exposure_control_operations.py @@ -8,7 +8,7 @@ from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar import warnings -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest from azure.mgmt.core.exceptions import ARMErrorFormat @@ -61,12 +61,15 @@ async def get_feature_value( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.ExposureControlResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) exposure_control_request = models.ExposureControlRequest(feature_name=feature_name, feature_type=feature_type) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.get_feature_value.metadata['url'] # type: ignore @@ -83,13 +86,12 @@ async def get_feature_value( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(exposure_control_request, 'ExposureControlRequest') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -129,12 +131,15 @@ async def get_feature_value_by_factory( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.ExposureControlResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) exposure_control_request = models.ExposureControlRequest(feature_name=feature_name, feature_type=feature_type) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = 
self.get_feature_value_by_factory.metadata['url'] # type: ignore @@ -152,13 +157,12 @@ async def get_feature_value_by_factory( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(exposure_control_request, 'ExposureControlRequest') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -195,12 +199,15 @@ async def query_feature_value_by_factory( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.ExposureControlBatchResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) exposure_control_batch_request = models.ExposureControlBatchRequest(exposure_control_requests=exposure_control_requests) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.query_feature_value_by_factory.metadata['url'] # type: ignore @@ -218,13 +225,12 @@ async def query_feature_value_by_factory( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(exposure_control_batch_request, 'ExposureControlBatchRequest') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_factory_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_factory_operations.py similarity index 91% rename from src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_factory_operations_async.py rename to src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_factory_operations.py index 46f37c1a6f7..8a84d44be7c 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_factory_operations_async.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_factory_operations.py @@ -5,11 +5,11 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar +from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union import warnings from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest from azure.mgmt.core.exceptions import ARMErrorFormat @@ -53,14 +53,17 @@ def list( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.FactoryListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL @@ -125,12 +128,15 @@ async def configure_factory_repo( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.Factory"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) factory_repo_update = models.FactoryRepoUpdate(factory_resource_id=factory_resource_id, repo_configuration=repo_configuration) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.configure_factory_repo.metadata['url'] # type: ignore @@ -147,13 +153,12 @@ async def configure_factory_repo( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(factory_repo_update, 'FactoryRepoUpdate') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -184,14 +189,17 @@ def list_by_resource_group( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.FactoryListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + 
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL @@ -246,6 +254,7 @@ async def create_or_update( identity: Optional["models.FactoryIdentity"] = None, repo_configuration: Optional["models.FactoryRepoConfiguration"] = None, global_parameters: Optional[Dict[str, "models.GlobalParameterSpecification"]] = None, + public_network_access: Optional[Union[str, "models.PublicNetworkAccess"]] = None, **kwargs ) -> "models.Factory": """Creates or updates a factory. @@ -267,18 +276,24 @@ async def create_or_update( :type repo_configuration: ~data_factory_management_client.models.FactoryRepoConfiguration :param global_parameters: List of parameters for factory. :type global_parameters: dict[str, ~data_factory_management_client.models.GlobalParameterSpecification] + :param public_network_access: Whether or not public network access is allowed for the data + factory. + :type public_network_access: str or ~data_factory_management_client.models.PublicNetworkAccess :keyword callable cls: A custom type or function that will be passed the direct response :return: Factory, or the result of cls(response) :rtype: ~data_factory_management_client.models.Factory :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.Factory"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) - factory = models.Factory(location=location, tags=tags, identity=identity, repo_configuration=repo_configuration, global_parameters=global_parameters) + factory = models.Factory(location=location, tags=tags, identity=identity, repo_configuration=repo_configuration, global_parameters=global_parameters, public_network_access=public_network_access) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.create_or_update.metadata['url'] # type: ignore @@ -298,13 +313,12 @@ async def create_or_update( if if_match is not None: header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(factory, 'Factory') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -344,12 +358,15 @@ async def update( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.Factory"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) factory_update_parameters = models.FactoryUpdateParameters(tags=tags, identity=identity) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.update.metadata['url'] # type: ignore @@ -367,13 +384,12 @@ async def update( # 
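# Illustrative sketch of the new public_network_access parameter added to
# create_or_update above.  'factories' stands for the async factory operations group
# on an already-constructed management client; client construction, resource names
# and the region are placeholder assumptions, not values from this change.  The
# Union[str, "models.PublicNetworkAccess"] annotation means the plain string form
# shown here is accepted as well as the enum member.
from typing import Any


async def create_locked_down_factory(factories: Any) -> Any:
    return await factories.create_or_update(
        resource_group_name="exampleResourceGroup",
        factory_name="exampleFactoryName",
        location="East US",
        public_network_access="Disabled",
    )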
Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(factory_update_parameters, 'FactoryUpdateParameters') body_content_kwargs['content'] = body_content request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -411,9 +427,12 @@ async def get( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.Factory"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.get.metadata['url'] # type: ignore @@ -432,7 +451,7 @@ async def get( header_parameters = {} # type: Dict[str, Any] if if_none_match is not None: header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -470,9 +489,12 @@ async def delete( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.delete.metadata['url'] # type: ignore @@ -489,6 +511,7 @@ async def delete( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -530,12 +553,15 @@ async def get_git_hub_access_token( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.GitHubAccessTokenResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) git_hub_access_token_request = models.GitHubAccessTokenRequest(git_hub_access_code=git_hub_access_code, git_hub_client_id=git_hub_client_id, git_hub_access_token_base_url=git_hub_access_token_base_url) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.get_git_hub_access_token.metadata['url'] # type: ignore @@ -553,13 +579,12 @@ async def get_git_hub_access_token( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", 
content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(git_hub_access_token_request, 'GitHubAccessTokenRequest') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -612,12 +637,15 @@ async def get_data_plane_access( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.AccessPolicyResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) policy = models.UserAccessPolicy(permissions=permissions, access_resource_path=access_resource_path, profile_name=profile_name, start_time=start_time, expire_time=expire_time) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.get_data_plane_access.metadata['url'] # type: ignore @@ -635,13 +663,12 @@ async def get_data_plane_access( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(policy, 'UserAccessPolicy') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_integration_runtime_node_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_integration_runtime_node_operations.py similarity index 93% rename from src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_integration_runtime_node_operations_async.py rename to src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_integration_runtime_node_operations.py index a6022196653..223efd027dc 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_integration_runtime_node_operations_async.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_integration_runtime_node_operations.py @@ -8,7 +8,7 @@ from typing import Any, Callable, Dict, Generic, Optional, TypeVar import warnings -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest from azure.mgmt.core.exceptions import ARMErrorFormat @@ -64,9 +64,12 @@ async def get( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: 
ClsType["models.SelfHostedIntegrationRuntimeNode"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.get.metadata['url'] # type: ignore @@ -85,7 +88,7 @@ async def get( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -127,9 +130,12 @@ async def delete( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.delete.metadata['url'] # type: ignore @@ -148,6 +154,7 @@ async def delete( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -190,12 +197,15 @@ async def update( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.SelfHostedIntegrationRuntimeNode"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) update_integration_runtime_node_request = models.UpdateIntegrationRuntimeNodeRequest(concurrent_jobs_limit=concurrent_jobs_limit) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.update.metadata['url'] # type: ignore @@ -215,13 +225,12 @@ async def update( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(update_integration_runtime_node_request, 'UpdateIntegrationRuntimeNodeRequest') body_content_kwargs['content'] = body_content request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -261,9 +270,12 @@ async def get_ip_address( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeNodeIpAddress"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = 
"application/json" # Construct URL url = self.get_ip_address.metadata['url'] # type: ignore @@ -282,7 +294,7 @@ async def get_ip_address( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_integration_runtime_object_metadata_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_integration_runtime_object_metadata_operations.py similarity index 89% rename from src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_integration_runtime_object_metadata_operations_async.py rename to src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_integration_runtime_object_metadata_operations.py index 70df0716c21..3379239a297 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_integration_runtime_object_metadata_operations_async.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_integration_runtime_object_metadata_operations.py @@ -8,7 +8,7 @@ from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union import warnings -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod @@ -50,9 +50,12 @@ async def _refresh_initial( **kwargs ) -> Optional["models.SsisObjectMetadataStatusResponse"]: cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.SsisObjectMetadataStatusResponse"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self._refresh_initial.metadata['url'] # type: ignore @@ -70,7 +73,7 @@ async def _refresh_initial( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -141,7 +144,14 @@ def get_long_running_output(pipeline_response): return cls(pipeline_response, deserialized, {}) return deserialized - if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, 
pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + + if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = AsyncNoPolling() else: polling_method = polling if cont_token: @@ -180,12 +190,15 @@ async def get( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.SsisObjectMetadataListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) get_metadata_request = models.GetSsisObjectMetadataRequest(metadata_path=metadata_path) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.get.metadata['url'] # type: ignore @@ -204,7 +217,7 @@ async def get( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] if get_metadata_request is not None: @@ -213,7 +226,6 @@ async def get( body_content = None body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_integration_runtime_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_integration_runtime_operations.py similarity index 91% rename from src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_integration_runtime_operations_async.py rename to src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_integration_runtime_operations.py index 82b285c7a74..95540948b5b 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_integration_runtime_operations_async.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_integration_runtime_operations.py @@ -9,7 +9,7 @@ import warnings from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod @@ -61,14 +61,17 @@ def list_by_factory( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: 
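# Illustrative sketch of the path_format_arguments now handed to the async ARM
# poller above: with the formatted URL path parameters available, the poller can
# resolve relative Azure-AsyncOperation/Location links returned by the service.
# The subscription id and resource names are placeholders; 30 seconds mirrors a
# typical polling_interval default.
from msrest import Serializer
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling

_serialize = Serializer()
path_format_arguments = {
    'subscriptionId': _serialize.url("subscription_id", "00000000-0000-0000-0000-000000000000", 'str'),
    'resourceGroupName': _serialize.url("resource_group_name", "exampleResourceGroup", 'str'),
    'factoryName': _serialize.url("factory_name", "exampleFactoryName", 'str'),
    'integrationRuntimeName': _serialize.url("integration_runtime_name", "exampleIntegrationRuntime", 'str'),
}

lro_delay = 30
polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments)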
ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL @@ -142,12 +145,15 @@ async def create_or_update( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) integration_runtime = models.IntegrationRuntimeResource(properties=properties) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.create_or_update.metadata['url'] # type: ignore @@ -168,13 +174,12 @@ async def create_or_update( if if_match is not None: header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(integration_runtime, 'IntegrationRuntimeResource') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -216,9 +221,12 @@ async def get( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.IntegrationRuntimeResource"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.get.metadata['url'] # type: ignore @@ -238,7 +246,7 @@ async def get( header_parameters = {} # type: Dict[str, Any] if if_none_match is not None: header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -287,12 +295,15 @@ async def update( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) update_integration_runtime_request = models.UpdateIntegrationRuntimeRequest(auto_update=auto_update, update_delay_offset=update_delay_offset) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # 
Construct URL url = self.update.metadata['url'] # type: ignore @@ -311,13 +322,12 @@ async def update( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(update_integration_runtime_request, 'UpdateIntegrationRuntimeRequest') body_content_kwargs['content'] = body_content request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -354,9 +364,12 @@ async def delete( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.delete.metadata['url'] # type: ignore @@ -374,6 +387,7 @@ async def delete( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -409,9 +423,12 @@ async def get_status( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeStatusResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.get_status.metadata['url'] # type: ignore @@ -429,7 +446,7 @@ async def get_status( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -469,9 +486,12 @@ async def get_connection_info( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeConnectionInfo"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.get_connection_info.metadata['url'] # type: ignore @@ -489,7 +509,7 @@ async def get_connection_info( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -531,12 
+551,15 @@ async def regenerate_auth_key( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeAuthKeys"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) regenerate_key_parameters = models.IntegrationRuntimeRegenerateKeyParameters(key_name=key_name) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.regenerate_auth_key.metadata['url'] # type: ignore @@ -555,13 +578,12 @@ async def regenerate_auth_key( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(regenerate_key_parameters, 'IntegrationRuntimeRegenerateKeyParameters') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -598,9 +620,12 @@ async def list_auth_key( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeAuthKeys"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.list_auth_key.metadata['url'] # type: ignore @@ -618,7 +643,7 @@ async def list_auth_key( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -644,9 +669,12 @@ async def _start_initial( **kwargs ) -> Optional["models.IntegrationRuntimeStatusResponse"]: cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.IntegrationRuntimeStatusResponse"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self._start_initial.metadata['url'] # type: ignore @@ -664,7 +692,7 @@ async def _start_initial( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -735,7 +763,14 @@ def get_long_running_output(pipeline_response): return cls(pipeline_response, deserialized, {}) return deserialized - if polling is True: polling_method = 
AsyncARMPolling(lro_delay, **kwargs) + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + + if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = AsyncNoPolling() else: polling_method = polling if cont_token: @@ -757,9 +792,12 @@ async def _stop_initial( **kwargs ) -> None: cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self._stop_initial.metadata['url'] # type: ignore @@ -777,6 +815,7 @@ async def _stop_initial( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -839,7 +878,14 @@ def get_long_running_output(pipeline_response): if cls: return cls(pipeline_response, None, {}) - if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + + if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = AsyncNoPolling() else: polling_method = polling if cont_token: @@ -877,9 +923,12 @@ async def sync_credentials( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.sync_credentials.metadata['url'] # type: ignore @@ -897,6 +946,7 @@ async def sync_credentials( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await 
self._client._pipeline.run(request, stream=False, **kwargs) @@ -933,9 +983,12 @@ async def get_monitoring_data( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeMonitoringData"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.get_monitoring_data.metadata['url'] # type: ignore @@ -953,7 +1006,7 @@ async def get_monitoring_data( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -992,9 +1045,12 @@ async def upgrade( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.upgrade.metadata['url'] # type: ignore @@ -1012,6 +1068,7 @@ async def upgrade( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -1051,12 +1108,15 @@ async def remove_link( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) linked_integration_runtime_request = models.LinkedIntegrationRuntimeRequest(linked_factory_name=linked_factory_name) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.remove_link.metadata['url'] # type: ignore @@ -1075,12 +1135,12 @@ async def remove_link( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(linked_integration_runtime_request, 'LinkedIntegrationRuntimeRequest') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -1129,12 +1189,15 @@ async def create_linked_integration_runtime( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeStatusResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: 
ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) create_linked_integration_runtime_request = models.CreateLinkedIntegrationRuntimeRequest(name=name, subscription_id=subscription_id, data_factory_name=data_factory_name, data_factory_location=data_factory_location) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.create_linked_integration_runtime.metadata['url'] # type: ignore @@ -1153,13 +1216,12 @@ async def create_linked_integration_runtime( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(create_linked_integration_runtime_request, 'CreateLinkedIntegrationRuntimeRequest') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_linked_service_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_linked_service_operations.py similarity index 93% rename from src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_linked_service_operations_async.py rename to src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_linked_service_operations.py index 56e9e6f663a..83de062ae3b 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_linked_service_operations_async.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_linked_service_operations.py @@ -9,7 +9,7 @@ import warnings from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest from azure.mgmt.core.exceptions import ARMErrorFormat @@ -59,14 +59,17 @@ def list_by_factory( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.LinkedServiceListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL @@ -140,12 +143,15 @@ async def create_or_update( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.LinkedServiceResource"] - 
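# Illustrative sketch of consuming the list_by_factory operations shown above: they
# return an AsyncItemPaged rather than a coroutine, so the call itself is not
# awaited, and "async for" walks every page via the nextLink handling in
# prepare_request/extract_data.  'linked_services' and the resource names are
# placeholders for objects the caller already has.
from typing import Any


async def print_linked_service_names(linked_services: Any) -> None:
    pager = linked_services.list_by_factory(
        resource_group_name="exampleResourceGroup",
        factory_name="exampleFactoryName",
    )
    async for resource in pager:
        # each item deserializes to a models.LinkedServiceResource
        print(resource.name)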
error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) linked_service = models.LinkedServiceResource(properties=properties) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.create_or_update.metadata['url'] # type: ignore @@ -166,13 +172,12 @@ async def create_or_update( if if_match is not None: header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(linked_service, 'LinkedServiceResource') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -214,9 +219,12 @@ async def get( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.LinkedServiceResource"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.get.metadata['url'] # type: ignore @@ -236,7 +244,7 @@ async def get( header_parameters = {} # type: Dict[str, Any] if if_none_match is not None: header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -277,9 +285,12 @@ async def delete( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.delete.metadata['url'] # type: ignore @@ -297,6 +308,7 @@ async def delete( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_managed_private_endpoint_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_managed_private_endpoint_operations.py similarity index 94% rename from src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_managed_private_endpoint_operations_async.py rename to 
src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_managed_private_endpoint_operations.py index 3a899779963..ef1fe194a45 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_managed_private_endpoint_operations_async.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_managed_private_endpoint_operations.py @@ -9,7 +9,7 @@ import warnings from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest from azure.mgmt.core.exceptions import ARMErrorFormat @@ -62,14 +62,17 @@ def list_by_factory( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedPrivateEndpointListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL @@ -157,12 +160,15 @@ async def create_or_update( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedPrivateEndpointResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) managed_private_endpoint = models.ManagedPrivateEndpointResource(connection_state=connection_state, fqdns=fqdns, group_id=group_id, private_link_resource_id=private_link_resource_id) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.create_or_update.metadata['url'] # type: ignore @@ -184,13 +190,12 @@ async def create_or_update( if if_match is not None: header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(managed_private_endpoint, 'ManagedPrivateEndpointResource') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -235,9 +240,12 @@ async def get( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedPrivateEndpointResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: 
ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.get.metadata['url'] # type: ignore @@ -258,7 +266,7 @@ async def get( header_parameters = {} # type: Dict[str, Any] if if_none_match is not None: header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -300,9 +308,12 @@ async def delete( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.delete.metadata['url'] # type: ignore @@ -321,6 +332,7 @@ async def delete( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_managed_virtual_network_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_managed_virtual_network_operations.py similarity index 93% rename from src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_managed_virtual_network_operations_async.py rename to src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_managed_virtual_network_operations.py index 2152988d7ef..bfddec510a9 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_managed_virtual_network_operations_async.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_managed_virtual_network_operations.py @@ -9,7 +9,7 @@ import warnings from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest from azure.mgmt.core.exceptions import ARMErrorFormat @@ -59,14 +59,17 @@ def list_by_factory( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedVirtualNetworkListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = 
self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL @@ -140,12 +143,15 @@ async def create_or_update( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedVirtualNetworkResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) managed_virtual_network = models.ManagedVirtualNetworkResource(properties=properties) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.create_or_update.metadata['url'] # type: ignore @@ -166,13 +172,12 @@ async def create_or_update( if if_match is not None: header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(managed_virtual_network, 'ManagedVirtualNetworkResource') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -214,9 +219,12 @@ async def get( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedVirtualNetworkResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.get.metadata['url'] # type: ignore @@ -236,7 +244,7 @@ async def get( header_parameters = {} # type: Dict[str, Any] if if_none_match is not None: header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_operation_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_operation_operations.py similarity index 91% rename from src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_operation_operations_async.py rename to src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_operation_operations.py index 83206d77039..a15920ebee4 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_operation_operations_async.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_operation_operations.py @@ -9,7 +9,7 @@ import warnings from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from 
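# Illustrative sketch of the module rename carried through these hunks
# (aio/operations_async/*_operations_async.py -> aio/operations/*_operations.py):
# code that imported the async operation modules by path needs the new location,
# while access through the generated client object should be unaffected.
#
# Old module path (removed by this change):
#   from azext_datafactory.vendored_sdks.datafactory.aio.operations_async import _factory_operations_async
# New module path:
from azext_datafactory.vendored_sdks.datafactory.aio.operations import _factory_operations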
azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest from azure.mgmt.core.exceptions import ARMErrorFormat @@ -53,14 +53,17 @@ def list( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.OperationListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_pipeline_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_pipeline_operations.py similarity index 93% rename from src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_pipeline_operations_async.py rename to src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_pipeline_operations.py index 34c7453f951..21613019909 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_pipeline_operations_async.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_pipeline_operations.py @@ -9,7 +9,7 @@ import warnings from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest from azure.mgmt.core.exceptions import ARMErrorFormat @@ -59,14 +59,17 @@ def list_by_factory( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL @@ -140,10 +143,13 @@ async def create_or_update( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = 
self.create_or_update.metadata['url'] # type: ignore @@ -164,13 +170,12 @@ async def create_or_update( if if_match is not None: header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(pipeline, 'PipelineResource') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -211,9 +216,12 @@ async def get( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.PipelineResource"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.get.metadata['url'] # type: ignore @@ -233,7 +241,7 @@ async def get( header_parameters = {} # type: Dict[str, Any] if if_none_match is not None: header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -274,9 +282,12 @@ async def delete( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.delete.metadata['url'] # type: ignore @@ -294,6 +305,7 @@ async def delete( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -349,10 +361,13 @@ async def create_run( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.CreateRunResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.create_run.metadata['url'] # type: ignore @@ -379,7 +394,7 @@ async def create_run( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = 
self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] if parameters is not None: @@ -388,7 +403,6 @@ async def create_run( body_content = None body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_pipeline_run_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_pipeline_run_operations.py similarity index 92% rename from src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_pipeline_run_operations_async.py rename to src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_pipeline_run_operations.py index 5cdfd09fe01..0f4e74f9ec6 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_pipeline_run_operations_async.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_pipeline_run_operations.py @@ -9,7 +9,7 @@ from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar import warnings -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest from azure.mgmt.core.exceptions import ARMErrorFormat @@ -77,12 +77,15 @@ async def query_by_factory( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineRunsQueryResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) filter_parameters = models.RunFilterParameters(continuation_token=continuation_token_parameter, last_updated_after=last_updated_after, last_updated_before=last_updated_before, filters=filters, order_by=order_by) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.query_by_factory.metadata['url'] # type: ignore @@ -100,13 +103,12 @@ async def query_by_factory( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(filter_parameters, 'RunFilterParameters') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -143,9 +145,12 @@ async def get( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineRun"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: 
ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.get.metadata['url'] # type: ignore @@ -163,7 +168,7 @@ async def get( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -206,9 +211,12 @@ async def cancel( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.cancel.metadata['url'] # type: ignore @@ -228,6 +236,7 @@ async def cancel( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_trigger_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_trigger_operations.py similarity index 88% rename from src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_trigger_operations_async.py rename to src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_trigger_operations.py index f4669b45bc2..348082e81c1 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_trigger_operations_async.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_trigger_operations.py @@ -9,7 +9,7 @@ import warnings from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod @@ -61,14 +61,17 @@ def list_by_factory( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL @@ -140,12 +143,15 @@ async def query_by_factory( :raises: 
~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerQueryResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) filter_parameters = models.TriggerFilterParameters(continuation_token=continuation_token_parameter, parent_trigger_name=parent_trigger_name) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.query_by_factory.metadata['url'] # type: ignore @@ -163,13 +169,12 @@ async def query_by_factory( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(filter_parameters, 'TriggerFilterParameters') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -213,12 +218,15 @@ async def create_or_update( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) trigger = models.TriggerResource(properties=properties) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.create_or_update.metadata['url'] # type: ignore @@ -239,13 +247,12 @@ async def create_or_update( if if_match is not None: header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(trigger, 'TriggerResource') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -286,9 +293,12 @@ async def get( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.TriggerResource"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.get.metadata['url'] # type: ignore @@ -308,7 +318,7 @@ async def get( header_parameters = {} # type: Dict[str, Any] if if_none_match is not None: header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 
'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -349,9 +359,12 @@ async def delete( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.delete.metadata['url'] # type: ignore @@ -369,6 +382,7 @@ async def delete( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -391,9 +405,12 @@ async def _subscribe_to_event_initial( **kwargs ) -> Optional["models.TriggerSubscriptionOperationStatus"]: cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.TriggerSubscriptionOperationStatus"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self._subscribe_to_event_initial.metadata['url'] # type: ignore @@ -411,7 +428,7 @@ async def _subscribe_to_event_initial( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -482,7 +499,14 @@ def get_long_running_output(pipeline_response): return cls(pipeline_response, deserialized, {}) return deserialized - if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + + if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = AsyncNoPolling() else: polling_method = polling if cont_token: @@ -517,9 +541,12 @@ async def get_event_subscription_status( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerSubscriptionOperationStatus"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } 
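
The hunks above apply the same three changes to every async operation: the error map gains a `401: ClientAuthenticationError` entry, the hard-coded `'application/json'` Accept header is replaced by an `accept` variable routed through `self._serialize.header(...)`, and long-running operations now hand `path_format_arguments` to `AsyncARMPolling` so the poller can re-expand the templated URL. A minimal sketch of the first two, outside the generated operations classes, is below; `serialize_header` is a purely illustrative stand-in for msrest's serializer, and running it assumes `azure-core` is installed.

```python
# Illustrative sketch of the request-setup pattern this patch standardizes;
# not the generated SDK code itself.
from typing import Any, Dict

from azure.core.exceptions import (
    ClientAuthenticationError,
    ResourceExistsError,
    ResourceNotFoundError,
)


def serialize_header(value: str) -> str:
    """Stand-in for self._serialize.header(name, value, 'str')."""
    return str(value)


def build_request_parts(**kwargs: Any) -> Dict[str, Any]:
    # 401 is now mapped so auth failures surface as ClientAuthenticationError
    # instead of a generic HttpResponseError.
    error_map = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
    }
    error_map.update(kwargs.pop("error_map", {}))

    # The Accept header is derived from a serialized `accept` variable rather
    # than the literal 'application/json'.
    accept = "application/json"
    header_parameters = {"Accept": serialize_header(accept)}
    return {"error_map": error_map, "header_parameters": header_parameters}


if __name__ == "__main__":
    parts = build_request_parts()
    print(parts["error_map"][401].__name__)  # ClientAuthenticationError
    print(parts["header_parameters"])        # {'Accept': 'application/json'}
```
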
error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.get_event_subscription_status.metadata['url'] # type: ignore @@ -537,7 +564,7 @@ async def get_event_subscription_status( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -563,9 +590,12 @@ async def _unsubscribe_from_event_initial( **kwargs ) -> Optional["models.TriggerSubscriptionOperationStatus"]: cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.TriggerSubscriptionOperationStatus"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self._unsubscribe_from_event_initial.metadata['url'] # type: ignore @@ -583,7 +613,7 @@ async def _unsubscribe_from_event_initial( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -654,7 +684,14 @@ def get_long_running_output(pipeline_response): return cls(pipeline_response, deserialized, {}) return deserialized - if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + + if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = AsyncNoPolling() else: polling_method = polling if cont_token: @@ -676,9 +713,12 @@ async def _start_initial( **kwargs ) -> None: cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self._start_initial.metadata['url'] # type: ignore @@ -696,6 +736,7 @@ async def _start_initial( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -758,7 +799,14 @@ def get_long_running_output(pipeline_response): if cls: 
return cls(pipeline_response, None, {}) - if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + + if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = AsyncNoPolling() else: polling_method = polling if cont_token: @@ -780,9 +828,12 @@ async def _stop_initial( **kwargs ) -> None: cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self._stop_initial.metadata['url'] # type: ignore @@ -800,6 +851,7 @@ async def _stop_initial( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -862,7 +914,14 @@ def get_long_running_output(pipeline_response): if cls: return cls(pipeline_response, None, {}) - if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + + if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = AsyncNoPolling() else: polling_method = polling if cont_token: diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_trigger_run_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_trigger_run_operations.py similarity index 92% rename from src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_trigger_run_operations_async.py rename to src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_trigger_run_operations.py index 3401f9c95c1..61c41db164e 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_trigger_run_operations_async.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_trigger_run_operations.py @@ -9,7 +9,7 @@ from typing import 
Any, Callable, Dict, Generic, List, Optional, TypeVar import warnings -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest from azure.mgmt.core.exceptions import ARMErrorFormat @@ -65,9 +65,12 @@ async def rerun( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.rerun.metadata['url'] # type: ignore @@ -86,6 +89,7 @@ async def rerun( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -124,9 +128,12 @@ async def cancel( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.cancel.metadata['url'] # type: ignore @@ -145,6 +152,7 @@ async def cancel( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -195,12 +203,15 @@ async def query_by_factory( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerRunsQueryResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) filter_parameters = models.RunFilterParameters(continuation_token=continuation_token_parameter, last_updated_after=last_updated_after, last_updated_before=last_updated_before, filters=filters, order_by=order_by) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.query_by_factory.metadata['url'] # type: ignore @@ -218,13 +229,12 @@ async def query_by_factory( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(filter_parameters, 'RunFilterParameters') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, 
**body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/__init__.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/__init__.py deleted file mode 100644 index 554e3ba9232..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/__init__.py +++ /dev/null @@ -1,45 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- - -from ._operation_operations_async import OperationOperations -from ._factory_operations_async import FactoryOperations -from ._exposure_control_operations_async import ExposureControlOperations -from ._integration_runtime_operations_async import IntegrationRuntimeOperations -from ._integration_runtime_object_metadata_operations_async import IntegrationRuntimeObjectMetadataOperations -from ._integration_runtime_node_operations_async import IntegrationRuntimeNodeOperations -from ._linked_service_operations_async import LinkedServiceOperations -from ._dataset_operations_async import DatasetOperations -from ._pipeline_operations_async import PipelineOperations -from ._pipeline_run_operations_async import PipelineRunOperations -from ._activity_run_operations_async import ActivityRunOperations -from ._trigger_operations_async import TriggerOperations -from ._trigger_run_operations_async import TriggerRunOperations -from ._data_flow_operations_async import DataFlowOperations -from ._data_flow_debug_session_operations_async import DataFlowDebugSessionOperations -from ._managed_virtual_network_operations_async import ManagedVirtualNetworkOperations -from ._managed_private_endpoint_operations_async import ManagedPrivateEndpointOperations - -__all__ = [ - 'OperationOperations', - 'FactoryOperations', - 'ExposureControlOperations', - 'IntegrationRuntimeOperations', - 'IntegrationRuntimeObjectMetadataOperations', - 'IntegrationRuntimeNodeOperations', - 'LinkedServiceOperations', - 'DatasetOperations', - 'PipelineOperations', - 'PipelineRunOperations', - 'ActivityRunOperations', - 'TriggerOperations', - 'TriggerRunOperations', - 'DataFlowOperations', - 'DataFlowDebugSessionOperations', - 'ManagedVirtualNetworkOperations', - 'ManagedPrivateEndpointOperations', -] diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/__init__.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/__init__.py index 160afca0562..4d780f2444c 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/__init__.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/__init__.py @@ -58,6 +58,12 @@ from ._models_py3 import AzureDataLakeStoreSink from ._models_py3 import AzureDataLakeStoreSource from ._models_py3 import AzureDataLakeStoreWriteSettings + from ._models_py3 import AzureDatabricksDeltaLakeDataset + from ._models_py3 import AzureDatabricksDeltaLakeExportCommand + from ._models_py3 import AzureDatabricksDeltaLakeImportCommand + from 
._models_py3 import AzureDatabricksDeltaLakeLinkedService + from ._models_py3 import AzureDatabricksDeltaLakeSink + from ._models_py3 import AzureDatabricksDeltaLakeSource from ._models_py3 import AzureDatabricksLinkedService from ._models_py3 import AzureFileStorageLinkedService from ._models_py3 import AzureFileStorageLocation @@ -127,6 +133,7 @@ from ._models_py3 import ConnectionStateProperties from ._models_py3 import ControlActivity from ._models_py3 import CopyActivity + from ._models_py3 import CopyActivityLogSettings from ._models_py3 import CopySink from ._models_py3 import CopySource from ._models_py3 import CopyTranslator @@ -185,6 +192,8 @@ from ._models_py3 import DatasetResource from ._models_py3 import DatasetSchemaDataElement from ._models_py3 import DatasetStorageFormat + from ._models_py3 import DatasetTarCompression + from ._models_py3 import DatasetTarGZipCompression from ._models_py3 import DatasetZipDeflateCompression from ._models_py3 import Db2LinkedService from ._models_py3 import Db2Source @@ -347,6 +356,8 @@ from ._models_py3 import LinkedServiceListResponse from ._models_py3 import LinkedServiceReference from ._models_py3 import LinkedServiceResource + from ._models_py3 import LogLocationSettings + from ._models_py3 import LogSettings from ._models_py3 import LogStorageSettings from ._models_py3 import LookupActivity from ._models_py3 import MagentoLinkedService @@ -374,6 +385,9 @@ from ._models_py3 import MicrosoftAccessSink from ._models_py3 import MicrosoftAccessSource from ._models_py3 import MicrosoftAccessTableDataset + from ._models_py3 import MongoDBAtlasCollectionDataset + from ._models_py3 import MongoDBAtlasLinkedService + from ._models_py3 import MongoDBAtlasSource from ._models_py3 import MongoDBCollectionDataset from ._models_py3 import MongoDBCursorMethodsProperties from ._models_py3 import MongoDBLinkedService @@ -419,12 +433,14 @@ from ._models_py3 import OrcFormat from ._models_py3 import OrcSink from ._models_py3 import OrcSource + from ._models_py3 import OrcWriteSettings from ._models_py3 import PackageStore from ._models_py3 import ParameterSpecification from ._models_py3 import ParquetDataset from ._models_py3 import ParquetFormat from ._models_py3 import ParquetSink from ._models_py3 import ParquetSource + from ._models_py3 import ParquetWriteSettings from ._models_py3 import PaypalLinkedService from ._models_py3 import PaypalObjectDataset from ._models_py3 import PaypalSource @@ -577,6 +593,8 @@ from ._models_py3 import SybaseTableDataset from ._models_py3 import TabularSource from ._models_py3 import TabularTranslator + from ._models_py3 import TarGZipReadSettings + from ._models_py3 import TarReadSettings from ._models_py3 import TeradataLinkedService from ._models_py3 import TeradataPartitionSettings from ._models_py3 import TeradataSource @@ -680,6 +698,12 @@ from ._models import AzureDataLakeStoreSink # type: ignore from ._models import AzureDataLakeStoreSource # type: ignore from ._models import AzureDataLakeStoreWriteSettings # type: ignore + from ._models import AzureDatabricksDeltaLakeDataset # type: ignore + from ._models import AzureDatabricksDeltaLakeExportCommand # type: ignore + from ._models import AzureDatabricksDeltaLakeImportCommand # type: ignore + from ._models import AzureDatabricksDeltaLakeLinkedService # type: ignore + from ._models import AzureDatabricksDeltaLakeSink # type: ignore + from ._models import AzureDatabricksDeltaLakeSource # type: ignore from ._models import AzureDatabricksLinkedService # type: ignore 
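
The `models/__init__.py` hunks re-export the new model classes (the AzureDatabricksDeltaLake family, MongoDBAtlas datasets and linked service, tar compressions, log settings, and the Orc/Parquet write settings). The small check below is not part of the patch; it only verifies that those names are importable from the vendored SDK, using the module path shown in the diff, and assumes the extension's vendored SDK is on the Python path.

```python
# Sanity-check sketch: confirm the newly re-exported models are reachable.
new_exports = [
    "AzureDatabricksDeltaLakeLinkedService",
    "AzureDatabricksDeltaLakeDataset",
    "MongoDBAtlasLinkedService",
    "MongoDBAtlasCollectionDataset",
    "DatasetTarCompression",
    "DatasetTarGZipCompression",
    "LogSettings",
    "OrcWriteSettings",
    "ParquetWriteSettings",
]

try:
    from azext_datafactory.vendored_sdks.datafactory import models
except ImportError:
    print("vendored SDK not importable in this environment")
else:
    for name in new_exports:
        print(name, hasattr(models, name))
```
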
from ._models import AzureFileStorageLinkedService # type: ignore from ._models import AzureFileStorageLocation # type: ignore @@ -749,6 +773,7 @@ from ._models import ConnectionStateProperties # type: ignore from ._models import ControlActivity # type: ignore from ._models import CopyActivity # type: ignore + from ._models import CopyActivityLogSettings # type: ignore from ._models import CopySink # type: ignore from ._models import CopySource # type: ignore from ._models import CopyTranslator # type: ignore @@ -807,6 +832,8 @@ from ._models import DatasetResource # type: ignore from ._models import DatasetSchemaDataElement # type: ignore from ._models import DatasetStorageFormat # type: ignore + from ._models import DatasetTarCompression # type: ignore + from ._models import DatasetTarGZipCompression # type: ignore from ._models import DatasetZipDeflateCompression # type: ignore from ._models import Db2LinkedService # type: ignore from ._models import Db2Source # type: ignore @@ -969,6 +996,8 @@ from ._models import LinkedServiceListResponse # type: ignore from ._models import LinkedServiceReference # type: ignore from ._models import LinkedServiceResource # type: ignore + from ._models import LogLocationSettings # type: ignore + from ._models import LogSettings # type: ignore from ._models import LogStorageSettings # type: ignore from ._models import LookupActivity # type: ignore from ._models import MagentoLinkedService # type: ignore @@ -996,6 +1025,9 @@ from ._models import MicrosoftAccessSink # type: ignore from ._models import MicrosoftAccessSource # type: ignore from ._models import MicrosoftAccessTableDataset # type: ignore + from ._models import MongoDBAtlasCollectionDataset # type: ignore + from ._models import MongoDBAtlasLinkedService # type: ignore + from ._models import MongoDBAtlasSource # type: ignore from ._models import MongoDBCollectionDataset # type: ignore from ._models import MongoDBCursorMethodsProperties # type: ignore from ._models import MongoDBLinkedService # type: ignore @@ -1041,12 +1073,14 @@ from ._models import OrcFormat # type: ignore from ._models import OrcSink # type: ignore from ._models import OrcSource # type: ignore + from ._models import OrcWriteSettings # type: ignore from ._models import PackageStore # type: ignore from ._models import ParameterSpecification # type: ignore from ._models import ParquetDataset # type: ignore from ._models import ParquetFormat # type: ignore from ._models import ParquetSink # type: ignore from ._models import ParquetSource # type: ignore + from ._models import ParquetWriteSettings # type: ignore from ._models import PaypalLinkedService # type: ignore from ._models import PaypalObjectDataset # type: ignore from ._models import PaypalSource # type: ignore @@ -1199,6 +1233,8 @@ from ._models import SybaseTableDataset # type: ignore from ._models import TabularSource # type: ignore from ._models import TabularTranslator # type: ignore + from ._models import TarGZipReadSettings # type: ignore + from ._models import TarReadSettings # type: ignore from ._models import TeradataLinkedService # type: ignore from ._models import TeradataPartitionSettings # type: ignore from ._models import TeradataSource # type: ignore @@ -1306,6 +1342,7 @@ PhoenixAuthenticationType, PolybaseSettingsRejectType, PrestoAuthenticationType, + PublicNetworkAccess, RecurrenceFrequency, RestServiceAuthenticationType, RunQueryFilterOperand, @@ -1393,6 +1430,12 @@ 'AzureDataLakeStoreSink', 'AzureDataLakeStoreSource', 'AzureDataLakeStoreWriteSettings', 
+ 'AzureDatabricksDeltaLakeDataset', + 'AzureDatabricksDeltaLakeExportCommand', + 'AzureDatabricksDeltaLakeImportCommand', + 'AzureDatabricksDeltaLakeLinkedService', + 'AzureDatabricksDeltaLakeSink', + 'AzureDatabricksDeltaLakeSource', 'AzureDatabricksLinkedService', 'AzureFileStorageLinkedService', 'AzureFileStorageLocation', @@ -1462,6 +1505,7 @@ 'ConnectionStateProperties', 'ControlActivity', 'CopyActivity', + 'CopyActivityLogSettings', 'CopySink', 'CopySource', 'CopyTranslator', @@ -1520,6 +1564,8 @@ 'DatasetResource', 'DatasetSchemaDataElement', 'DatasetStorageFormat', + 'DatasetTarCompression', + 'DatasetTarGZipCompression', 'DatasetZipDeflateCompression', 'Db2LinkedService', 'Db2Source', @@ -1682,6 +1728,8 @@ 'LinkedServiceListResponse', 'LinkedServiceReference', 'LinkedServiceResource', + 'LogLocationSettings', + 'LogSettings', 'LogStorageSettings', 'LookupActivity', 'MagentoLinkedService', @@ -1709,6 +1757,9 @@ 'MicrosoftAccessSink', 'MicrosoftAccessSource', 'MicrosoftAccessTableDataset', + 'MongoDBAtlasCollectionDataset', + 'MongoDBAtlasLinkedService', + 'MongoDBAtlasSource', 'MongoDBCollectionDataset', 'MongoDBCursorMethodsProperties', 'MongoDBLinkedService', @@ -1754,12 +1805,14 @@ 'OrcFormat', 'OrcSink', 'OrcSource', + 'OrcWriteSettings', 'PackageStore', 'ParameterSpecification', 'ParquetDataset', 'ParquetFormat', 'ParquetSink', 'ParquetSource', + 'ParquetWriteSettings', 'PaypalLinkedService', 'PaypalObjectDataset', 'PaypalSource', @@ -1912,6 +1965,8 @@ 'SybaseTableDataset', 'TabularSource', 'TabularTranslator', + 'TarGZipReadSettings', + 'TarReadSettings', 'TeradataLinkedService', 'TeradataPartitionSettings', 'TeradataSource', @@ -2017,6 +2072,7 @@ 'PhoenixAuthenticationType', 'PolybaseSettingsRejectType', 'PrestoAuthenticationType', + 'PublicNetworkAccess', 'RecurrenceFrequency', 'RestServiceAuthenticationType', 'RunQueryFilterOperand', diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_data_factory_management_client_enums.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_data_factory_management_client_enums.py index d663167e0f4..a4a7324fe9b 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_data_factory_management_client_enums.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_data_factory_management_client_enums.py @@ -86,6 +86,8 @@ class CompressionCodec(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): DEFLATE = "deflate" ZIP_DEFLATE = "zipDeflate" LZ4 = "lz4" + TAR = "tar" + TAR_G_ZIP = "tarGZip" class CopyBehaviorType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """All available types of copy behavior. @@ -183,7 +185,7 @@ class DynamicsServicePrincipalCredentialType(with_metaclass(_CaseInsensitiveEnum SERVICE_PRINCIPAL_CERT = "ServicePrincipalCert" class DynamicsSinkWriteBehavior(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - """The write behavior for the operation. + """Defines values for DynamicsSinkWriteBehavior. """ UPSERT = "Upsert" @@ -451,6 +453,7 @@ class OrcCompressionCodec(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): NONE = "none" ZLIB = "zlib" SNAPPY = "snappy" + LZO = "lzo" class ParameterType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Parameter type. 
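
The enum hunks above add `tar` and `tarGZip` to `CompressionCodec` and `lzo` to `OrcCompressionCodec`. The sketch below is a local, hypothetical copy of a few codec values, not the SDK's `CompressionCodec` class; it only illustrates the string values the service accepts and emulates the case-insensitive lookup that the generated `_CaseInsensitiveEnumMeta` enums provide.

```python
# Illustrative only: local enum mirroring the new compression codec values.
from enum import Enum


class CompressionCodecSketch(str, Enum):
    GZIP = "gzip"
    DEFLATE = "deflate"
    ZIP_DEFLATE = "zipDeflate"
    LZ4 = "lz4"
    TAR = "tar"            # new in this patch
    TAR_G_ZIP = "tarGZip"  # new in this patch


def codec_from_payload(value: str) -> CompressionCodecSketch:
    # Emulate case-insensitive matching of payload values.
    for member in CompressionCodecSketch:
        if member.value.lower() == value.lower():
            return member
    raise ValueError(f"unknown compression codec: {value}")


print(codec_from_payload("targzip").name)  # TAR_G_ZIP
```
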
@@ -486,6 +489,13 @@ class PrestoAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enu ANONYMOUS = "Anonymous" LDAP = "LDAP" +class PublicNetworkAccess(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Whether or not public network access is allowed for the data factory. + """ + + ENABLED = "Enabled" + DISABLED = "Disabled" + class RecurrenceFrequency(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Enumerates possible frequency option for the schedule trigger. """ @@ -746,6 +756,7 @@ class TumblingWindowFrequency(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum MINUTE = "Minute" HOUR = "Hour" + MONTH = "Month" class VariableType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Variable type. diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models.py index ec6d27ddb59..c120b1db8dd 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models.py @@ -342,7 +342,7 @@ class LinkedService(msrest.serialization.Model): """The Azure Data Factory nested object which contains the information and credential which can be used to connect with related store or compute resource. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AmazonMwsLinkedService, AmazonRedshiftLinkedService, AmazonS3LinkedService, AzureBatchLinkedService, AzureBlobFsLinkedService, AzureBlobStorageLinkedService, AzureDataExplorerLinkedService, AzureDataLakeAnalyticsLinkedService, AzureDataLakeStoreLinkedService, AzureDatabricksLinkedService, AzureFileStorageLinkedService, AzureFunctionLinkedService, AzureKeyVaultLinkedService, AzureMlLinkedService, AzureMlServiceLinkedService, AzureMariaDBLinkedService, AzureMySQLLinkedService, AzurePostgreSQLLinkedService, AzureSearchLinkedService, AzureSQLDWLinkedService, AzureSQLDatabaseLinkedService, AzureSQLMiLinkedService, AzureStorageLinkedService, AzureTableStorageLinkedService, CassandraLinkedService, CommonDataServiceForAppsLinkedService, ConcurLinkedService, CosmosDBLinkedService, CosmosDBMongoDBApiLinkedService, CouchbaseLinkedService, CustomDataSourceLinkedService, Db2LinkedService, DrillLinkedService, DynamicsLinkedService, DynamicsAxLinkedService, DynamicsCrmLinkedService, EloquaLinkedService, FileServerLinkedService, FtpServerLinkedService, GoogleAdWordsLinkedService, GoogleBigQueryLinkedService, GoogleCloudStorageLinkedService, GreenplumLinkedService, HBaseLinkedService, HdInsightLinkedService, HdInsightOnDemandLinkedService, HdfsLinkedService, HiveLinkedService, HttpLinkedService, HubspotLinkedService, ImpalaLinkedService, InformixLinkedService, JiraLinkedService, MagentoLinkedService, MariaDBLinkedService, MarketoLinkedService, MicrosoftAccessLinkedService, MongoDBLinkedService, MongoDBV2LinkedService, MySQLLinkedService, NetezzaLinkedService, ODataLinkedService, OdbcLinkedService, Office365LinkedService, OracleLinkedService, OracleServiceCloudLinkedService, PaypalLinkedService, PhoenixLinkedService, PostgreSQLLinkedService, PrestoLinkedService, QuickBooksLinkedService, ResponsysLinkedService, RestServiceLinkedService, SalesforceLinkedService, SalesforceMarketingCloudLinkedService, SalesforceServiceCloudLinkedService, SapBwLinkedService, SapCloudForCustomerLinkedService, SapEccLinkedService, SapHanaLinkedService, SapOpenHubLinkedService, SapTableLinkedService, 
ServiceNowLinkedService, SftpServerLinkedService, SharePointOnlineListLinkedService, ShopifyLinkedService, SnowflakeLinkedService, SparkLinkedService, SQLServerLinkedService, SquareLinkedService, SybaseLinkedService, TeradataLinkedService, VerticaLinkedService, WebLinkedService, XeroLinkedService, ZohoLinkedService. + sub-classes are: AmazonMwsLinkedService, AmazonRedshiftLinkedService, AmazonS3LinkedService, AzureBatchLinkedService, AzureBlobFsLinkedService, AzureBlobStorageLinkedService, AzureDataExplorerLinkedService, AzureDataLakeAnalyticsLinkedService, AzureDataLakeStoreLinkedService, AzureDatabricksLinkedService, AzureDatabricksDeltaLakeLinkedService, AzureFileStorageLinkedService, AzureFunctionLinkedService, AzureKeyVaultLinkedService, AzureMlLinkedService, AzureMlServiceLinkedService, AzureMariaDBLinkedService, AzureMySQLLinkedService, AzurePostgreSQLLinkedService, AzureSearchLinkedService, AzureSQLDWLinkedService, AzureSQLDatabaseLinkedService, AzureSQLMiLinkedService, AzureStorageLinkedService, AzureTableStorageLinkedService, CassandraLinkedService, CommonDataServiceForAppsLinkedService, ConcurLinkedService, CosmosDBLinkedService, CosmosDBMongoDBApiLinkedService, CouchbaseLinkedService, CustomDataSourceLinkedService, Db2LinkedService, DrillLinkedService, DynamicsLinkedService, DynamicsAxLinkedService, DynamicsCrmLinkedService, EloquaLinkedService, FileServerLinkedService, FtpServerLinkedService, GoogleAdWordsLinkedService, GoogleBigQueryLinkedService, GoogleCloudStorageLinkedService, GreenplumLinkedService, HBaseLinkedService, HdInsightLinkedService, HdInsightOnDemandLinkedService, HdfsLinkedService, HiveLinkedService, HttpLinkedService, HubspotLinkedService, ImpalaLinkedService, InformixLinkedService, JiraLinkedService, MagentoLinkedService, MariaDBLinkedService, MarketoLinkedService, MicrosoftAccessLinkedService, MongoDBLinkedService, MongoDBAtlasLinkedService, MongoDBV2LinkedService, MySQLLinkedService, NetezzaLinkedService, ODataLinkedService, OdbcLinkedService, Office365LinkedService, OracleLinkedService, OracleServiceCloudLinkedService, PaypalLinkedService, PhoenixLinkedService, PostgreSQLLinkedService, PrestoLinkedService, QuickBooksLinkedService, ResponsysLinkedService, RestServiceLinkedService, SalesforceLinkedService, SalesforceMarketingCloudLinkedService, SalesforceServiceCloudLinkedService, SapBwLinkedService, SapCloudForCustomerLinkedService, SapEccLinkedService, SapHanaLinkedService, SapOpenHubLinkedService, SapTableLinkedService, ServiceNowLinkedService, SftpServerLinkedService, SharePointOnlineListLinkedService, ShopifyLinkedService, SnowflakeLinkedService, SparkLinkedService, SQLServerLinkedService, SquareLinkedService, SybaseLinkedService, TeradataLinkedService, VerticaLinkedService, WebLinkedService, XeroLinkedService, ZohoLinkedService. All required parameters must be populated in order to send to Azure. 
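
The updated `LinkedService` docstring now lists `AzureDatabricksDeltaLakeLinkedService` and `MongoDBAtlasLinkedService` among the known sub-classes, and the `_subtype_map` hunk below registers the matching discriminators. The sketch that follows shows only the dispatch idea behind that map with a small, hypothetical excerpt; it is not msrest's deserializer and the mapping is deliberately incomplete.

```python
# Minimal sketch of discriminator dispatch: the linked service JSON's "type"
# field selects the model class name, as _subtype_map does in the SDK.
SUBTYPE_MAP_EXCERPT = {
    "AzureDatabricks": "AzureDatabricksLinkedService",
    "AzureDatabricksDeltaLake": "AzureDatabricksDeltaLakeLinkedService",  # new
    "MongoDb": "MongoDBLinkedService",
    "MongoDbAtlas": "MongoDBAtlasLinkedService",                          # new
}


def resolve_linked_service_class(payload: dict) -> str:
    """Return the model class name a payload would deserialize into."""
    discriminator = payload.get("type")
    try:
        return SUBTYPE_MAP_EXCERPT[discriminator]
    except KeyError:
        raise ValueError(f"unsupported linked service type: {discriminator}") from None


print(resolve_linked_service_class({"type": "AzureDatabricksDeltaLake"}))
# -> AzureDatabricksDeltaLakeLinkedService
```
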
@@ -375,7 +375,7 @@ class LinkedService(msrest.serialization.Model): } _subtype_map = { - 'type': {'AmazonMWS': 'AmazonMwsLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureBlobFS': 'AzureBlobFsLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureDataExplorer': 'AzureDataExplorerLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureFileStorage': 'AzureFileStorageLinkedService', 'AzureFunction': 'AzureFunctionLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureML': 'AzureMlLinkedService', 'AzureMLService': 'AzureMlServiceLinkedService', 'AzureMariaDB': 'AzureMariaDBLinkedService', 'AzureMySql': 'AzureMySQLLinkedService', 'AzurePostgreSql': 'AzurePostgreSQLLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'AzureSqlDW': 'AzureSQLDWLinkedService', 'AzureSqlDatabase': 'AzureSQLDatabaseLinkedService', 'AzureSqlMI': 'AzureSQLMiLinkedService', 'AzureStorage': 'AzureStorageLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'Cassandra': 'CassandraLinkedService', 'CommonDataServiceForApps': 'CommonDataServiceForAppsLinkedService', 'Concur': 'ConcurLinkedService', 'CosmosDb': 'CosmosDBLinkedService', 'CosmosDbMongoDbApi': 'CosmosDBMongoDBApiLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'Db2': 'Db2LinkedService', 'Drill': 'DrillLinkedService', 'Dynamics': 'DynamicsLinkedService', 'DynamicsAX': 'DynamicsAxLinkedService', 'DynamicsCrm': 'DynamicsCrmLinkedService', 'Eloqua': 'EloquaLinkedService', 'FileServer': 'FileServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'GoogleCloudStorage': 'GoogleCloudStorageLinkedService', 'Greenplum': 'GreenplumLinkedService', 'HBase': 'HBaseLinkedService', 'HDInsight': 'HdInsightLinkedService', 'HDInsightOnDemand': 'HdInsightOnDemandLinkedService', 'Hdfs': 'HdfsLinkedService', 'Hive': 'HiveLinkedService', 'HttpServer': 'HttpLinkedService', 'Hubspot': 'HubspotLinkedService', 'Impala': 'ImpalaLinkedService', 'Informix': 'InformixLinkedService', 'Jira': 'JiraLinkedService', 'Magento': 'MagentoLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Marketo': 'MarketoLinkedService', 'MicrosoftAccess': 'MicrosoftAccessLinkedService', 'MongoDb': 'MongoDBLinkedService', 'MongoDbV2': 'MongoDBV2LinkedService', 'MySql': 'MySQLLinkedService', 'Netezza': 'NetezzaLinkedService', 'OData': 'ODataLinkedService', 'Odbc': 'OdbcLinkedService', 'Office365': 'Office365LinkedService', 'Oracle': 'OracleLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 'Paypal': 'PaypalLinkedService', 'Phoenix': 'PhoenixLinkedService', 'PostgreSql': 'PostgreSQLLinkedService', 'Presto': 'PrestoLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Responsys': 'ResponsysLinkedService', 'RestService': 'RestServiceLinkedService', 'Salesforce': 'SalesforceLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'SalesforceServiceCloud': 'SalesforceServiceCloudLinkedService', 'SapBW': 'SapBwLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapTable': 
'SapTableLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'Sftp': 'SftpServerLinkedService', 'SharePointOnlineList': 'SharePointOnlineListLinkedService', 'Shopify': 'ShopifyLinkedService', 'Snowflake': 'SnowflakeLinkedService', 'Spark': 'SparkLinkedService', 'SqlServer': 'SQLServerLinkedService', 'Square': 'SquareLinkedService', 'Sybase': 'SybaseLinkedService', 'Teradata': 'TeradataLinkedService', 'Vertica': 'VerticaLinkedService', 'Web': 'WebLinkedService', 'Xero': 'XeroLinkedService', 'Zoho': 'ZohoLinkedService'} + 'type': {'AmazonMWS': 'AmazonMwsLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureBlobFS': 'AzureBlobFsLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureDataExplorer': 'AzureDataExplorerLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureDatabricksDeltaLake': 'AzureDatabricksDeltaLakeLinkedService', 'AzureFileStorage': 'AzureFileStorageLinkedService', 'AzureFunction': 'AzureFunctionLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureML': 'AzureMlLinkedService', 'AzureMLService': 'AzureMlServiceLinkedService', 'AzureMariaDB': 'AzureMariaDBLinkedService', 'AzureMySql': 'AzureMySQLLinkedService', 'AzurePostgreSql': 'AzurePostgreSQLLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'AzureSqlDW': 'AzureSQLDWLinkedService', 'AzureSqlDatabase': 'AzureSQLDatabaseLinkedService', 'AzureSqlMI': 'AzureSQLMiLinkedService', 'AzureStorage': 'AzureStorageLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'Cassandra': 'CassandraLinkedService', 'CommonDataServiceForApps': 'CommonDataServiceForAppsLinkedService', 'Concur': 'ConcurLinkedService', 'CosmosDb': 'CosmosDBLinkedService', 'CosmosDbMongoDbApi': 'CosmosDBMongoDBApiLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'Db2': 'Db2LinkedService', 'Drill': 'DrillLinkedService', 'Dynamics': 'DynamicsLinkedService', 'DynamicsAX': 'DynamicsAxLinkedService', 'DynamicsCrm': 'DynamicsCrmLinkedService', 'Eloqua': 'EloquaLinkedService', 'FileServer': 'FileServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'GoogleCloudStorage': 'GoogleCloudStorageLinkedService', 'Greenplum': 'GreenplumLinkedService', 'HBase': 'HBaseLinkedService', 'HDInsight': 'HdInsightLinkedService', 'HDInsightOnDemand': 'HdInsightOnDemandLinkedService', 'Hdfs': 'HdfsLinkedService', 'Hive': 'HiveLinkedService', 'HttpServer': 'HttpLinkedService', 'Hubspot': 'HubspotLinkedService', 'Impala': 'ImpalaLinkedService', 'Informix': 'InformixLinkedService', 'Jira': 'JiraLinkedService', 'Magento': 'MagentoLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Marketo': 'MarketoLinkedService', 'MicrosoftAccess': 'MicrosoftAccessLinkedService', 'MongoDb': 'MongoDBLinkedService', 'MongoDbAtlas': 'MongoDBAtlasLinkedService', 'MongoDbV2': 'MongoDBV2LinkedService', 'MySql': 'MySQLLinkedService', 'Netezza': 'NetezzaLinkedService', 'OData': 'ODataLinkedService', 'Odbc': 'OdbcLinkedService', 'Office365': 'Office365LinkedService', 'Oracle': 'OracleLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 'Paypal': 'PaypalLinkedService', 'Phoenix': 'PhoenixLinkedService', 'PostgreSql': 'PostgreSQLLinkedService', 
'Presto': 'PrestoLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Responsys': 'ResponsysLinkedService', 'RestService': 'RestServiceLinkedService', 'Salesforce': 'SalesforceLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'SalesforceServiceCloud': 'SalesforceServiceCloudLinkedService', 'SapBW': 'SapBwLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapTable': 'SapTableLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'Sftp': 'SftpServerLinkedService', 'SharePointOnlineList': 'SharePointOnlineListLinkedService', 'Shopify': 'ShopifyLinkedService', 'Snowflake': 'SnowflakeLinkedService', 'Spark': 'SparkLinkedService', 'SqlServer': 'SQLServerLinkedService', 'Square': 'SquareLinkedService', 'Sybase': 'SybaseLinkedService', 'Teradata': 'TeradataLinkedService', 'Vertica': 'VerticaLinkedService', 'Web': 'WebLinkedService', 'Xero': 'XeroLinkedService', 'Zoho': 'ZohoLinkedService'} } def __init__( @@ -489,7 +489,7 @@ class Dataset(msrest.serialization.Model): """The Azure Data Factory nested object which identifies data within different data stores, such as tables, files, folders, and documents. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AmazonMwsObjectDataset, AmazonRedshiftTableDataset, AmazonS3Dataset, AvroDataset, AzureBlobDataset, AzureBlobFsDataset, AzureDataExplorerTableDataset, AzureDataLakeStoreDataset, AzureMariaDBTableDataset, AzureMySQLTableDataset, AzurePostgreSQLTableDataset, AzureSearchIndexDataset, AzureSQLDWTableDataset, AzureSQLMiTableDataset, AzureSQLTableDataset, AzureTableDataset, BinaryDataset, CassandraTableDataset, CommonDataServiceForAppsEntityDataset, ConcurObjectDataset, CosmosDBMongoDBApiCollectionDataset, CosmosDBSQLApiCollectionDataset, CouchbaseTableDataset, CustomDataset, Db2TableDataset, DelimitedTextDataset, DocumentDBCollectionDataset, DrillTableDataset, DynamicsAxResourceDataset, DynamicsCrmEntityDataset, DynamicsEntityDataset, EloquaObjectDataset, ExcelDataset, FileShareDataset, GoogleAdWordsObjectDataset, GoogleBigQueryObjectDataset, GreenplumTableDataset, HBaseObjectDataset, HiveObjectDataset, HttpDataset, HubspotObjectDataset, ImpalaObjectDataset, InformixTableDataset, JiraObjectDataset, JsonDataset, MagentoObjectDataset, MariaDBTableDataset, MarketoObjectDataset, MicrosoftAccessTableDataset, MongoDBCollectionDataset, MongoDBV2CollectionDataset, MySQLTableDataset, NetezzaTableDataset, ODataResourceDataset, OdbcTableDataset, Office365Dataset, OracleServiceCloudObjectDataset, OracleTableDataset, OrcDataset, ParquetDataset, PaypalObjectDataset, PhoenixObjectDataset, PostgreSQLTableDataset, PrestoObjectDataset, QuickBooksObjectDataset, RelationalTableDataset, ResponsysObjectDataset, RestResourceDataset, SalesforceMarketingCloudObjectDataset, SalesforceObjectDataset, SalesforceServiceCloudObjectDataset, SapBwCubeDataset, SapCloudForCustomerResourceDataset, SapEccResourceDataset, SapHanaTableDataset, SapOpenHubTableDataset, SapTableResourceDataset, ServiceNowObjectDataset, SharePointOnlineListResourceDataset, ShopifyObjectDataset, SnowflakeDataset, SparkObjectDataset, SQLServerTableDataset, SquareObjectDataset, SybaseTableDataset, TeradataTableDataset, VerticaTableDataset, WebTableDataset, XeroObjectDataset, XmlDataset, ZohoObjectDataset. 
+ sub-classes are: AmazonMwsObjectDataset, AmazonRedshiftTableDataset, AmazonS3Dataset, AvroDataset, AzureBlobDataset, AzureBlobFsDataset, AzureDataExplorerTableDataset, AzureDataLakeStoreDataset, AzureDatabricksDeltaLakeDataset, AzureMariaDBTableDataset, AzureMySQLTableDataset, AzurePostgreSQLTableDataset, AzureSearchIndexDataset, AzureSQLDWTableDataset, AzureSQLMiTableDataset, AzureSQLTableDataset, AzureTableDataset, BinaryDataset, CassandraTableDataset, CommonDataServiceForAppsEntityDataset, ConcurObjectDataset, CosmosDBMongoDBApiCollectionDataset, CosmosDBSQLApiCollectionDataset, CouchbaseTableDataset, CustomDataset, Db2TableDataset, DelimitedTextDataset, DocumentDBCollectionDataset, DrillTableDataset, DynamicsAxResourceDataset, DynamicsCrmEntityDataset, DynamicsEntityDataset, EloquaObjectDataset, ExcelDataset, FileShareDataset, GoogleAdWordsObjectDataset, GoogleBigQueryObjectDataset, GreenplumTableDataset, HBaseObjectDataset, HiveObjectDataset, HttpDataset, HubspotObjectDataset, ImpalaObjectDataset, InformixTableDataset, JiraObjectDataset, JsonDataset, MagentoObjectDataset, MariaDBTableDataset, MarketoObjectDataset, MicrosoftAccessTableDataset, MongoDBAtlasCollectionDataset, MongoDBCollectionDataset, MongoDBV2CollectionDataset, MySQLTableDataset, NetezzaTableDataset, ODataResourceDataset, OdbcTableDataset, Office365Dataset, OracleServiceCloudObjectDataset, OracleTableDataset, OrcDataset, ParquetDataset, PaypalObjectDataset, PhoenixObjectDataset, PostgreSQLTableDataset, PrestoObjectDataset, QuickBooksObjectDataset, RelationalTableDataset, ResponsysObjectDataset, RestResourceDataset, SalesforceMarketingCloudObjectDataset, SalesforceObjectDataset, SalesforceServiceCloudObjectDataset, SapBwCubeDataset, SapCloudForCustomerResourceDataset, SapEccResourceDataset, SapHanaTableDataset, SapOpenHubTableDataset, SapTableResourceDataset, ServiceNowObjectDataset, SharePointOnlineListResourceDataset, ShopifyObjectDataset, SnowflakeDataset, SparkObjectDataset, SQLServerTableDataset, SquareObjectDataset, SybaseTableDataset, TeradataTableDataset, VerticaTableDataset, WebTableDataset, XeroObjectDataset, XmlDataset, ZohoObjectDataset. All required parameters must be populated in order to send to Azure. 
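Illustrative aside (not part of the generated code): the updated sub-class list above, together with the _subtype_map hunk that follows, is what drives msrest's polymorphic deserialization; the "type" discriminator in the REST payload selects the concrete model class. A minimal pure-Python sketch of that dispatch, using entries copied from this diff; the helper name resolve_dataset_class and the fallback behavior are invented for illustration only.

# Illustrative only; msrest performs the real lookup via Dataset._subtype_map.
_dataset_subtype_map = {
    "AzureDatabricksDeltaLakeDataset": "AzureDatabricksDeltaLakeDataset",  # added by this change
    "MongoDbAtlasCollection": "MongoDBAtlasCollectionDataset",             # added by this change
    "AzureBlob": "AzureBlobDataset",
}

def resolve_dataset_class(payload):
    """Return the model class name a dataset payload would deserialize into."""
    # Unknown discriminators fall back to the base Dataset class in this sketch.
    return _dataset_subtype_map.get(payload.get("type", ""), "Dataset")

print(resolve_dataset_class({"type": "AzureDatabricksDeltaLakeDataset"}))
# -> AzureDatabricksDeltaLakeDataset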
@@ -535,7 +535,7 @@ class Dataset(msrest.serialization.Model): } _subtype_map = { - 'type': {'AmazonMWSObject': 'AmazonMwsObjectDataset', 'AmazonRedshiftTable': 'AmazonRedshiftTableDataset', 'AmazonS3Object': 'AmazonS3Dataset', 'Avro': 'AvroDataset', 'AzureBlob': 'AzureBlobDataset', 'AzureBlobFSFile': 'AzureBlobFsDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'AzureMariaDBTable': 'AzureMariaDBTableDataset', 'AzureMySqlTable': 'AzureMySQLTableDataset', 'AzurePostgreSqlTable': 'AzurePostgreSQLTableDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'AzureSqlDWTable': 'AzureSQLDWTableDataset', 'AzureSqlMITable': 'AzureSQLMiTableDataset', 'AzureSqlTable': 'AzureSQLTableDataset', 'AzureTable': 'AzureTableDataset', 'Binary': 'BinaryDataset', 'CassandraTable': 'CassandraTableDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'ConcurObject': 'ConcurObjectDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDBMongoDBApiCollectionDataset', 'CosmosDbSqlApiCollection': 'CosmosDBSQLApiCollectionDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'CustomDataset': 'CustomDataset', 'Db2Table': 'Db2TableDataset', 'DelimitedText': 'DelimitedTextDataset', 'DocumentDbCollection': 'DocumentDBCollectionDataset', 'DrillTable': 'DrillTableDataset', 'DynamicsAXResource': 'DynamicsAxResourceDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'EloquaObject': 'EloquaObjectDataset', 'Excel': 'ExcelDataset', 'FileShare': 'FileShareDataset', 'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'HBaseObject': 'HBaseObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HttpFile': 'HttpDataset', 'HubspotObject': 'HubspotObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'InformixTable': 'InformixTableDataset', 'JiraObject': 'JiraObjectDataset', 'Json': 'JsonDataset', 'MagentoObject': 'MagentoObjectDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MarketoObject': 'MarketoObjectDataset', 'MicrosoftAccessTable': 'MicrosoftAccessTableDataset', 'MongoDbCollection': 'MongoDBCollectionDataset', 'MongoDbV2Collection': 'MongoDBV2CollectionDataset', 'MySqlTable': 'MySQLTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ODataResource': 'ODataResourceDataset', 'OdbcTable': 'OdbcTableDataset', 'Office365Table': 'Office365Dataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'OracleTable': 'OracleTableDataset', 'Orc': 'OrcDataset', 'Parquet': 'ParquetDataset', 'PaypalObject': 'PaypalObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PostgreSqlTable': 'PostgreSQLTableDataset', 'PrestoObject': 'PrestoObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'RelationalTable': 'RelationalTableDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'RestResource': 'RestResourceDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'SalesforceServiceCloudObject': 'SalesforceServiceCloudObjectDataset', 'SapBwCube': 'SapBwCubeDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'SharePointOnlineListResource': 
'SharePointOnlineListResourceDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'SnowflakeTable': 'SnowflakeDataset', 'SparkObject': 'SparkObjectDataset', 'SqlServerTable': 'SQLServerTableDataset', 'SquareObject': 'SquareObjectDataset', 'SybaseTable': 'SybaseTableDataset', 'TeradataTable': 'TeradataTableDataset', 'VerticaTable': 'VerticaTableDataset', 'WebTable': 'WebTableDataset', 'XeroObject': 'XeroObjectDataset', 'Xml': 'XmlDataset', 'ZohoObject': 'ZohoObjectDataset'} + 'type': {'AmazonMWSObject': 'AmazonMwsObjectDataset', 'AmazonRedshiftTable': 'AmazonRedshiftTableDataset', 'AmazonS3Object': 'AmazonS3Dataset', 'Avro': 'AvroDataset', 'AzureBlob': 'AzureBlobDataset', 'AzureBlobFSFile': 'AzureBlobFsDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'AzureDatabricksDeltaLakeDataset': 'AzureDatabricksDeltaLakeDataset', 'AzureMariaDBTable': 'AzureMariaDBTableDataset', 'AzureMySqlTable': 'AzureMySQLTableDataset', 'AzurePostgreSqlTable': 'AzurePostgreSQLTableDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'AzureSqlDWTable': 'AzureSQLDWTableDataset', 'AzureSqlMITable': 'AzureSQLMiTableDataset', 'AzureSqlTable': 'AzureSQLTableDataset', 'AzureTable': 'AzureTableDataset', 'Binary': 'BinaryDataset', 'CassandraTable': 'CassandraTableDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'ConcurObject': 'ConcurObjectDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDBMongoDBApiCollectionDataset', 'CosmosDbSqlApiCollection': 'CosmosDBSQLApiCollectionDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'CustomDataset': 'CustomDataset', 'Db2Table': 'Db2TableDataset', 'DelimitedText': 'DelimitedTextDataset', 'DocumentDbCollection': 'DocumentDBCollectionDataset', 'DrillTable': 'DrillTableDataset', 'DynamicsAXResource': 'DynamicsAxResourceDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'EloquaObject': 'EloquaObjectDataset', 'Excel': 'ExcelDataset', 'FileShare': 'FileShareDataset', 'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'HBaseObject': 'HBaseObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HttpFile': 'HttpDataset', 'HubspotObject': 'HubspotObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'InformixTable': 'InformixTableDataset', 'JiraObject': 'JiraObjectDataset', 'Json': 'JsonDataset', 'MagentoObject': 'MagentoObjectDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MarketoObject': 'MarketoObjectDataset', 'MicrosoftAccessTable': 'MicrosoftAccessTableDataset', 'MongoDbAtlasCollection': 'MongoDBAtlasCollectionDataset', 'MongoDbCollection': 'MongoDBCollectionDataset', 'MongoDbV2Collection': 'MongoDBV2CollectionDataset', 'MySqlTable': 'MySQLTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ODataResource': 'ODataResourceDataset', 'OdbcTable': 'OdbcTableDataset', 'Office365Table': 'Office365Dataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'OracleTable': 'OracleTableDataset', 'Orc': 'OrcDataset', 'Parquet': 'ParquetDataset', 'PaypalObject': 'PaypalObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PostgreSqlTable': 'PostgreSQLTableDataset', 'PrestoObject': 'PrestoObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'RelationalTable': 'RelationalTableDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'RestResource': 'RestResourceDataset', 'SalesforceMarketingCloudObject': 
'SalesforceMarketingCloudObjectDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'SalesforceServiceCloudObject': 'SalesforceServiceCloudObjectDataset', 'SapBwCube': 'SapBwCubeDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'SharePointOnlineListResource': 'SharePointOnlineListResourceDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'SnowflakeTable': 'SnowflakeDataset', 'SparkObject': 'SparkObjectDataset', 'SqlServerTable': 'SQLServerTableDataset', 'SquareObject': 'SquareObjectDataset', 'SybaseTable': 'SybaseTableDataset', 'TeradataTable': 'TeradataTableDataset', 'VerticaTable': 'VerticaTableDataset', 'WebTable': 'WebTableDataset', 'XeroObject': 'XeroObjectDataset', 'Xml': 'XmlDataset', 'ZohoObject': 'ZohoObjectDataset'} } def __init__( @@ -616,7 +616,7 @@ class CopySource(msrest.serialization.Model): """A copy activity source. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AvroSource, AzureBlobFsSource, AzureDataExplorerSource, AzureDataLakeStoreSource, BinarySource, BlobSource, CommonDataServiceForAppsSource, CosmosDBMongoDBApiSource, CosmosDBSQLApiSource, DelimitedTextSource, DocumentDBCollectionSource, DynamicsCrmSource, DynamicsSource, ExcelSource, FileSystemSource, HdfsSource, HttpSource, JsonSource, MicrosoftAccessSource, MongoDBSource, MongoDBV2Source, ODataSource, Office365Source, OracleSource, OrcSource, ParquetSource, RelationalSource, RestSource, SalesforceServiceCloudSource, SharePointOnlineListSource, SnowflakeSource, TabularSource, WebSource, XmlSource. + sub-classes are: AvroSource, AzureBlobFsSource, AzureDataExplorerSource, AzureDataLakeStoreSource, AzureDatabricksDeltaLakeSource, BinarySource, BlobSource, CommonDataServiceForAppsSource, CosmosDBMongoDBApiSource, CosmosDBSQLApiSource, DelimitedTextSource, DocumentDBCollectionSource, DynamicsCrmSource, DynamicsSource, ExcelSource, FileSystemSource, HdfsSource, HttpSource, JsonSource, MicrosoftAccessSource, MongoDBAtlasSource, MongoDBSource, MongoDBV2Source, ODataSource, Office365Source, OracleSource, OrcSource, ParquetSource, RelationalSource, RestSource, SalesforceServiceCloudSource, SharePointOnlineListSource, SnowflakeSource, TabularSource, WebSource, XmlSource. All required parameters must be populated in order to send to Azure. 
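Illustrative aside (not part of the generated code): the _attribute_map entries throughout this file use dotted keys such as 'typeProperties.domain', which msrest expands into nested REST JSON when serializing these models. A hedged sketch of that expansion; the key names are taken from the AzureDatabricksDeltaLakeLinkedService hunk further down in this diff, while the helper expand_dotted_keys and the placeholder values are invented for illustration.

def expand_dotted_keys(flat):
    """Expand {'typeProperties.domain': x} into {'typeProperties': {'domain': x}}."""
    nested = {}
    for dotted_key, value in flat.items():
        node = nested
        *parents, leaf = dotted_key.split(".")
        for part in parents:
            node = node.setdefault(part, {})
        node[leaf] = value
    return nested

print(expand_dotted_keys({
    "type": "AzureDatabricksDeltaLake",
    "typeProperties.domain": "https://adb-1234567890123456.7.azuredatabricks.net",  # placeholder
    "typeProperties.clusterId": "0123-456789-example",                               # placeholder
}))
# -> {'type': 'AzureDatabricksDeltaLake', 'typeProperties': {'domain': ..., 'clusterId': ...}}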
@@ -649,7 +649,7 @@ class CopySource(msrest.serialization.Model): } _subtype_map = { - 'type': {'AvroSource': 'AvroSource', 'AzureBlobFSSource': 'AzureBlobFsSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'BinarySource': 'BinarySource', 'BlobSource': 'BlobSource', 'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource', 'CosmosDbMongoDbApiSource': 'CosmosDBMongoDBApiSource', 'CosmosDbSqlApiSource': 'CosmosDBSQLApiSource', 'DelimitedTextSource': 'DelimitedTextSource', 'DocumentDbCollectionSource': 'DocumentDBCollectionSource', 'DynamicsCrmSource': 'DynamicsCrmSource', 'DynamicsSource': 'DynamicsSource', 'ExcelSource': 'ExcelSource', 'FileSystemSource': 'FileSystemSource', 'HdfsSource': 'HdfsSource', 'HttpSource': 'HttpSource', 'JsonSource': 'JsonSource', 'MicrosoftAccessSource': 'MicrosoftAccessSource', 'MongoDbSource': 'MongoDBSource', 'MongoDbV2Source': 'MongoDBV2Source', 'ODataSource': 'ODataSource', 'Office365Source': 'Office365Source', 'OracleSource': 'OracleSource', 'OrcSource': 'OrcSource', 'ParquetSource': 'ParquetSource', 'RelationalSource': 'RelationalSource', 'RestSource': 'RestSource', 'SalesforceServiceCloudSource': 'SalesforceServiceCloudSource', 'SharePointOnlineListSource': 'SharePointOnlineListSource', 'SnowflakeSource': 'SnowflakeSource', 'TabularSource': 'TabularSource', 'WebSource': 'WebSource', 'XmlSource': 'XmlSource'} + 'type': {'AvroSource': 'AvroSource', 'AzureBlobFSSource': 'AzureBlobFsSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'AzureDatabricksDeltaLakeSource': 'AzureDatabricksDeltaLakeSource', 'BinarySource': 'BinarySource', 'BlobSource': 'BlobSource', 'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource', 'CosmosDbMongoDbApiSource': 'CosmosDBMongoDBApiSource', 'CosmosDbSqlApiSource': 'CosmosDBSQLApiSource', 'DelimitedTextSource': 'DelimitedTextSource', 'DocumentDbCollectionSource': 'DocumentDBCollectionSource', 'DynamicsCrmSource': 'DynamicsCrmSource', 'DynamicsSource': 'DynamicsSource', 'ExcelSource': 'ExcelSource', 'FileSystemSource': 'FileSystemSource', 'HdfsSource': 'HdfsSource', 'HttpSource': 'HttpSource', 'JsonSource': 'JsonSource', 'MicrosoftAccessSource': 'MicrosoftAccessSource', 'MongoDbAtlasSource': 'MongoDBAtlasSource', 'MongoDbSource': 'MongoDBSource', 'MongoDbV2Source': 'MongoDBV2Source', 'ODataSource': 'ODataSource', 'Office365Source': 'Office365Source', 'OracleSource': 'OracleSource', 'OrcSource': 'OrcSource', 'ParquetSource': 'ParquetSource', 'RelationalSource': 'RelationalSource', 'RestSource': 'RestSource', 'SalesforceServiceCloudSource': 'SalesforceServiceCloudSource', 'SharePointOnlineListSource': 'SharePointOnlineListSource', 'SnowflakeSource': 'SnowflakeSource', 'TabularSource': 'TabularSource', 'WebSource': 'WebSource', 'XmlSource': 'XmlSource'} } def __init__( @@ -1088,6 +1088,9 @@ class AmazonS3LinkedService(LinkedService): :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] + :param authentication_type: The authentication type of S3. Allowed value: AccessKey (default) + or TemporarySecurityCredentials. Type: string (or Expression with resultType string). + :type authentication_type: object :param access_key_id: The access key identifier of the Amazon S3 Identity and Access Management (IAM) user. 
Type: string (or Expression with resultType string). :type access_key_id: object @@ -1098,6 +1101,8 @@ class AmazonS3LinkedService(LinkedService): an optional property; change it only if you want to try a different service endpoint or want to switch between https and http. Type: string (or Expression with resultType string). :type service_url: object + :param session_token: The session token for the S3 temporary security credential. + :type session_token: ~data_factory_management_client.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -1115,9 +1120,11 @@ class AmazonS3LinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, 'access_key_id': {'key': 'typeProperties.accessKeyId', 'type': 'object'}, 'secret_access_key': {'key': 'typeProperties.secretAccessKey', 'type': 'SecretBase'}, 'service_url': {'key': 'typeProperties.serviceUrl', 'type': 'object'}, + 'session_token': {'key': 'typeProperties.sessionToken', 'type': 'SecretBase'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -1127,9 +1134,11 @@ def __init__( ): super(AmazonS3LinkedService, self).__init__(**kwargs) self.type = 'AmazonS3' # type: str + self.authentication_type = kwargs.get('authentication_type', None) self.access_key_id = kwargs.get('access_key_id', None) self.secret_access_key = kwargs.get('secret_access_key', None) self.service_url = kwargs.get('service_url', None) + self.session_token = kwargs.get('session_token', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) @@ -1555,7 +1564,7 @@ class CopySink(msrest.serialization.Model): """A copy activity sink. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AvroSink, AzureBlobFsSink, AzureDataExplorerSink, AzureDataLakeStoreSink, AzureMySQLSink, AzurePostgreSQLSink, AzureQueueSink, AzureSearchIndexSink, AzureSQLSink, AzureTableSink, BinarySink, BlobSink, CommonDataServiceForAppsSink, CosmosDBMongoDBApiSink, CosmosDBSQLApiSink, DelimitedTextSink, DocumentDBCollectionSink, DynamicsCrmSink, DynamicsSink, FileSystemSink, InformixSink, JsonSink, MicrosoftAccessSink, OdbcSink, OracleSink, OrcSink, ParquetSink, RestSink, SalesforceServiceCloudSink, SalesforceSink, SapCloudForCustomerSink, SnowflakeSink, SQLDWSink, SQLMiSink, SQLServerSink, SQLSink. + sub-classes are: AvroSink, AzureBlobFsSink, AzureDataExplorerSink, AzureDataLakeStoreSink, AzureDatabricksDeltaLakeSink, AzureMySQLSink, AzurePostgreSQLSink, AzureQueueSink, AzureSearchIndexSink, AzureSQLSink, AzureTableSink, BinarySink, BlobSink, CommonDataServiceForAppsSink, CosmosDBMongoDBApiSink, CosmosDBSQLApiSink, DelimitedTextSink, DocumentDBCollectionSink, DynamicsCrmSink, DynamicsSink, FileSystemSink, InformixSink, JsonSink, MicrosoftAccessSink, OdbcSink, OracleSink, OrcSink, ParquetSink, RestSink, SalesforceServiceCloudSink, SalesforceSink, SapCloudForCustomerSink, SnowflakeSink, SQLDWSink, SQLMiSink, SQLServerSink, SQLSink. All required parameters must be populated in order to send to Azure. 
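Illustrative aside (not part of the generated code): the AmazonS3LinkedService hunks above add authenticationType and sessionToken so the connector can authenticate with temporary security credentials instead of a plain access key. A hedged sketch of the resulting typeProperties shape; the property names come from the attribute_map entries added in this diff, while the placeholder values and the SecureString wrapper for secrets are illustrative assumptions, not real credentials.

# Illustrative payload shape only; every value below is a placeholder.
amazon_s3_linked_service = {
    "type": "AmazonS3",
    "typeProperties": {
        "authenticationType": "TemporarySecurityCredentials",  # new in this diff; default is AccessKey
        "accessKeyId": "<access-key-id>",
        "secretAccessKey": {"type": "SecureString", "value": "<secret-access-key>"},
        "sessionToken": {"type": "SecureString", "value": "<session-token>"},  # new in this diff
        "serviceUrl": "https://s3.amazonaws.com",
    },
}
print(sorted(amazon_s3_linked_service["typeProperties"]))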
@@ -1596,7 +1605,7 @@ class CopySink(msrest.serialization.Model): } _subtype_map = { - 'type': {'AvroSink': 'AvroSink', 'AzureBlobFSSink': 'AzureBlobFsSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'AzureMySqlSink': 'AzureMySQLSink', 'AzurePostgreSqlSink': 'AzurePostgreSQLSink', 'AzureQueueSink': 'AzureQueueSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureSqlSink': 'AzureSQLSink', 'AzureTableSink': 'AzureTableSink', 'BinarySink': 'BinarySink', 'BlobSink': 'BlobSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'CosmosDbMongoDbApiSink': 'CosmosDBMongoDBApiSink', 'CosmosDbSqlApiSink': 'CosmosDBSQLApiSink', 'DelimitedTextSink': 'DelimitedTextSink', 'DocumentDbCollectionSink': 'DocumentDBCollectionSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'FileSystemSink': 'FileSystemSink', 'InformixSink': 'InformixSink', 'JsonSink': 'JsonSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'OdbcSink': 'OdbcSink', 'OracleSink': 'OracleSink', 'OrcSink': 'OrcSink', 'ParquetSink': 'ParquetSink', 'RestSink': 'RestSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'SnowflakeSink': 'SnowflakeSink', 'SqlDWSink': 'SQLDWSink', 'SqlMISink': 'SQLMiSink', 'SqlServerSink': 'SQLServerSink', 'SqlSink': 'SQLSink'} + 'type': {'AvroSink': 'AvroSink', 'AzureBlobFSSink': 'AzureBlobFsSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'AzureDatabricksDeltaLakeSink': 'AzureDatabricksDeltaLakeSink', 'AzureMySqlSink': 'AzureMySQLSink', 'AzurePostgreSqlSink': 'AzurePostgreSQLSink', 'AzureQueueSink': 'AzureQueueSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureSqlSink': 'AzureSQLSink', 'AzureTableSink': 'AzureTableSink', 'BinarySink': 'BinarySink', 'BlobSink': 'BlobSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'CosmosDbMongoDbApiSink': 'CosmosDBMongoDBApiSink', 'CosmosDbSqlApiSink': 'CosmosDBSQLApiSink', 'DelimitedTextSink': 'DelimitedTextSink', 'DocumentDbCollectionSink': 'DocumentDBCollectionSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'FileSystemSink': 'FileSystemSink', 'InformixSink': 'InformixSink', 'JsonSink': 'JsonSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'OdbcSink': 'OdbcSink', 'OracleSink': 'OracleSink', 'OrcSink': 'OrcSink', 'ParquetSink': 'ParquetSink', 'RestSink': 'RestSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'SnowflakeSink': 'SnowflakeSink', 'SqlDWSink': 'SQLDWSink', 'SqlMISink': 'SQLMiSink', 'SqlServerSink': 'SQLServerSink', 'SqlSink': 'SQLSink'} } def __init__( @@ -1724,7 +1733,7 @@ class FormatWriteSettings(msrest.serialization.Model): """Format write settings. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AvroWriteSettings, DelimitedTextWriteSettings, JsonWriteSettings. + sub-classes are: AvroWriteSettings, DelimitedTextWriteSettings, JsonWriteSettings, OrcWriteSettings, ParquetWriteSettings. All required parameters must be populated in order to send to Azure. 
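Illustrative aside (not part of the generated code): besides registering OrcWriteSettings and ParquetWriteSettings as FormatWriteSettings sub-classes, the AvroWriteSettings hunk just below adds maxRowsPerFile and fileNamePrefix. A hedged sketch of the write-settings payload those two properties describe; the key names come from the attribute_map entries below, and the values are placeholders.

# Illustrative only: cap each output file at one million rows and prefix the
# generated file names when copying from a non-file-based store.
avro_write_settings = {
    "type": "AvroWriteSettings",
    "maxRowsPerFile": 1000000,
    "fileNamePrefix": "export",
}
print(avro_write_settings)

The new Orc and Parquet variants registered in the hunk above plug into the same FormatWriteSettings polymorphism, so a sink's format settings stay a single discriminated object.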
@@ -1745,7 +1754,7 @@ class FormatWriteSettings(msrest.serialization.Model): } _subtype_map = { - 'type': {'AvroWriteSettings': 'AvroWriteSettings', 'DelimitedTextWriteSettings': 'DelimitedTextWriteSettings', 'JsonWriteSettings': 'JsonWriteSettings'} + 'type': {'AvroWriteSettings': 'AvroWriteSettings', 'DelimitedTextWriteSettings': 'DelimitedTextWriteSettings', 'JsonWriteSettings': 'JsonWriteSettings', 'OrcWriteSettings': 'OrcWriteSettings', 'ParquetWriteSettings': 'ParquetWriteSettings'} } def __init__( @@ -1771,6 +1780,13 @@ class AvroWriteSettings(FormatWriteSettings): :type record_name: str :param record_namespace: Record namespace in the write result. :type record_namespace: str + :param max_rows_per_file: Limit the written file's row count to be smaller than or equal to the + specified count. Type: integer (or Expression with resultType integer). + :type max_rows_per_file: object + :param file_name_prefix: Specifies the file name pattern + :code:``_:code:``.:code:`` when copy from non-file + based store without partitionOptions. Type: string (or Expression with resultType string). + :type file_name_prefix: object """ _validation = { @@ -1782,6 +1798,8 @@ class AvroWriteSettings(FormatWriteSettings): 'type': {'key': 'type', 'type': 'str'}, 'record_name': {'key': 'recordName', 'type': 'str'}, 'record_namespace': {'key': 'recordNamespace', 'type': 'str'}, + 'max_rows_per_file': {'key': 'maxRowsPerFile', 'type': 'object'}, + 'file_name_prefix': {'key': 'fileNamePrefix', 'type': 'object'}, } def __init__( @@ -1792,6 +1810,8 @@ def __init__( self.type = 'AvroWriteSettings' # type: str self.record_name = kwargs.get('record_name', None) self.record_namespace = kwargs.get('record_namespace', None) + self.max_rows_per_file = kwargs.get('max_rows_per_file', None) + self.file_name_prefix = kwargs.get('file_name_prefix', None) class CustomSetupBase(msrest.serialization.Model): @@ -2742,323 +2762,712 @@ def __init__( self.block_size_in_mb = kwargs.get('block_size_in_mb', None) -class AzureDatabricksLinkedService(LinkedService): - """Azure Databricks linked service. +class AzureDatabricksDeltaLakeDataset(Dataset): + """Azure Databricks Delta Lake dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. Type of dataset.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference - :param description: Linked service description. + :param description: Dataset description. :type description: str - :param parameters: Parameters for linked service. + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :param parameters: Parameters for dataset. 
:type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. + :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] - :param domain: Required. :code:``.azuredatabricks.net, domain name of your Databricks - deployment. Type: string (or Expression with resultType string). - :type domain: object - :param access_token: Required. Access token for databricks REST API. Refer to - https://docs.azuredatabricks.net/api/latest/authentication.html. Type: string (or Expression - with resultType string). - :type access_token: ~data_factory_management_client.models.SecretBase - :param existing_cluster_id: The id of an existing interactive cluster that will be used for all - runs of this activity. Type: string (or Expression with resultType string). - :type existing_cluster_id: object - :param instance_pool_id: The id of an existing instance pool that will be used for all runs of - this activity. Type: string (or Expression with resultType string). - :type instance_pool_id: object - :param new_cluster_version: If not using an existing interactive cluster, this specifies the - Spark version of a new job cluster or instance pool nodes created for each run of this - activity. Required if instancePoolId is specified. Type: string (or Expression with resultType + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~data_factory_management_client.models.DatasetFolder + :param table: The name of delta table. Type: string (or Expression with resultType string). + :type table: object + :param database: The database name of delta table. Type: string (or Expression with resultType string). - :type new_cluster_version: object - :param new_cluster_num_of_worker: If not using an existing interactive cluster, this specifies - the number of worker nodes to use for the new job cluster or instance pool. For new job - clusters, this a string-formatted Int32, like '1' means numOfWorker is 1 or '1:10' means auto- - scale from 1 (min) to 10 (max). For instance pools, this is a string-formatted Int32, and can - only specify a fixed number of worker nodes, such as '2'. Required if newClusterVersion is - specified. Type: string (or Expression with resultType string). - :type new_cluster_num_of_worker: object - :param new_cluster_node_type: The node type of the new job cluster. This property is required - if newClusterVersion is specified and instancePoolId is not specified. If instancePoolId is - specified, this property is ignored. Type: string (or Expression with resultType string). - :type new_cluster_node_type: object - :param new_cluster_spark_conf: A set of optional, user-specified Spark configuration key-value - pairs. - :type new_cluster_spark_conf: dict[str, object] - :param new_cluster_spark_env_vars: A set of optional, user-specified Spark environment - variables key-value pairs. - :type new_cluster_spark_env_vars: dict[str, object] - :param new_cluster_custom_tags: Additional tags for cluster resources. This property is ignored - in instance pool configurations. - :type new_cluster_custom_tags: dict[str, object] - :param new_cluster_log_destination: Specify a location to deliver Spark driver, worker, and - event logs. Type: string (or Expression with resultType string). 
- :type new_cluster_log_destination: object - :param new_cluster_driver_node_type: The driver node type for the new job cluster. This - property is ignored in instance pool configurations. Type: string (or Expression with - resultType string). - :type new_cluster_driver_node_type: object - :param new_cluster_init_scripts: User-defined initialization scripts for the new cluster. Type: - array of strings (or Expression with resultType array of strings). - :type new_cluster_init_scripts: object - :param new_cluster_enable_elastic_disk: Enable the elastic disk on the new cluster. This - property is now ignored, and takes the default elastic disk behavior in Databricks (elastic - disks are always enabled). Type: boolean (or Expression with resultType boolean). - :type new_cluster_enable_elastic_disk: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :type database: object """ _validation = { 'type': {'required': True}, - 'domain': {'required': True}, - 'access_token': {'required': True}, + 'linked_service_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'domain': {'key': 'typeProperties.domain', 'type': 'object'}, - 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, - 'existing_cluster_id': {'key': 'typeProperties.existingClusterId', 'type': 'object'}, - 'instance_pool_id': {'key': 'typeProperties.instancePoolId', 'type': 'object'}, - 'new_cluster_version': {'key': 'typeProperties.newClusterVersion', 'type': 'object'}, - 'new_cluster_num_of_worker': {'key': 'typeProperties.newClusterNumOfWorker', 'type': 'object'}, - 'new_cluster_node_type': {'key': 'typeProperties.newClusterNodeType', 'type': 'object'}, - 'new_cluster_spark_conf': {'key': 'typeProperties.newClusterSparkConf', 'type': '{object}'}, - 'new_cluster_spark_env_vars': {'key': 'typeProperties.newClusterSparkEnvVars', 'type': '{object}'}, - 'new_cluster_custom_tags': {'key': 'typeProperties.newClusterCustomTags', 'type': '{object}'}, - 'new_cluster_log_destination': {'key': 'typeProperties.newClusterLogDestination', 'type': 'object'}, - 'new_cluster_driver_node_type': {'key': 'typeProperties.newClusterDriverNodeType', 'type': 'object'}, - 'new_cluster_init_scripts': {'key': 'typeProperties.newClusterInitScripts', 'type': 'object'}, - 'new_cluster_enable_elastic_disk': {'key': 'typeProperties.newClusterEnableElasticDisk', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, } def __init__( self, **kwargs ): - super(AzureDatabricksLinkedService, self).__init__(**kwargs) - self.type = 'AzureDatabricks' # type: str - 
self.domain = kwargs['domain'] - self.access_token = kwargs['access_token'] - self.existing_cluster_id = kwargs.get('existing_cluster_id', None) - self.instance_pool_id = kwargs.get('instance_pool_id', None) - self.new_cluster_version = kwargs.get('new_cluster_version', None) - self.new_cluster_num_of_worker = kwargs.get('new_cluster_num_of_worker', None) - self.new_cluster_node_type = kwargs.get('new_cluster_node_type', None) - self.new_cluster_spark_conf = kwargs.get('new_cluster_spark_conf', None) - self.new_cluster_spark_env_vars = kwargs.get('new_cluster_spark_env_vars', None) - self.new_cluster_custom_tags = kwargs.get('new_cluster_custom_tags', None) - self.new_cluster_log_destination = kwargs.get('new_cluster_log_destination', None) - self.new_cluster_driver_node_type = kwargs.get('new_cluster_driver_node_type', None) - self.new_cluster_init_scripts = kwargs.get('new_cluster_init_scripts', None) - self.new_cluster_enable_elastic_disk = kwargs.get('new_cluster_enable_elastic_disk', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + super(AzureDatabricksDeltaLakeDataset, self).__init__(**kwargs) + self.type = 'AzureDatabricksDeltaLakeDataset' # type: str + self.table = kwargs.get('table', None) + self.database = kwargs.get('database', None) -class ExecutionActivity(Activity): - """Base class for all execution activities. +class ExportSettings(msrest.serialization.Model): + """Export command settings. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AzureDataExplorerCommandActivity, AzureFunctionActivity, AzureMlBatchExecutionActivity, AzureMlExecutePipelineActivity, AzureMlUpdateResourceActivity, CopyActivity, CustomActivity, DataLakeAnalyticsUsqlActivity, DatabricksNotebookActivity, DatabricksSparkJarActivity, DatabricksSparkPythonActivity, DeleteActivity, ExecuteDataFlowActivity, ExecuteSsisPackageActivity, GetMetadataActivity, HdInsightHiveActivity, HdInsightMapReduceActivity, HdInsightPigActivity, HdInsightSparkActivity, HdInsightStreamingActivity, LookupActivity, SQLServerStoredProcedureActivity, WebActivity. + sub-classes are: AzureDatabricksDeltaLakeExportCommand, SnowflakeExportCopyCommand. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. + :param type: Required. The export setting type.Constant filled by server. :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~data_factory_management_client.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~data_factory_management_client.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference - :param policy: Activity policy. 
- :type policy: ~data_factory_management_client.models.ActivityPolicy """ _validation = { - 'name': {'required': True}, 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, } _subtype_map = { - 'type': {'AzureDataExplorerCommand': 'AzureDataExplorerCommandActivity', 'AzureFunctionActivity': 'AzureFunctionActivity', 'AzureMLBatchExecution': 'AzureMlBatchExecutionActivity', 'AzureMLExecutePipeline': 'AzureMlExecutePipelineActivity', 'AzureMLUpdateResource': 'AzureMlUpdateResourceActivity', 'Copy': 'CopyActivity', 'Custom': 'CustomActivity', 'DataLakeAnalyticsU-SQL': 'DataLakeAnalyticsUsqlActivity', 'DatabricksNotebook': 'DatabricksNotebookActivity', 'DatabricksSparkJar': 'DatabricksSparkJarActivity', 'DatabricksSparkPython': 'DatabricksSparkPythonActivity', 'Delete': 'DeleteActivity', 'ExecuteDataFlow': 'ExecuteDataFlowActivity', 'ExecuteSSISPackage': 'ExecuteSsisPackageActivity', 'GetMetadata': 'GetMetadataActivity', 'HDInsightHive': 'HdInsightHiveActivity', 'HDInsightMapReduce': 'HdInsightMapReduceActivity', 'HDInsightPig': 'HdInsightPigActivity', 'HDInsightSpark': 'HdInsightSparkActivity', 'HDInsightStreaming': 'HdInsightStreamingActivity', 'Lookup': 'LookupActivity', 'SqlServerStoredProcedure': 'SQLServerStoredProcedureActivity', 'WebActivity': 'WebActivity'} + 'type': {'AzureDatabricksDeltaLakeExportCommand': 'AzureDatabricksDeltaLakeExportCommand', 'SnowflakeExportCopyCommand': 'SnowflakeExportCopyCommand'} } def __init__( self, **kwargs ): - super(ExecutionActivity, self).__init__(**kwargs) - self.type = 'Execution' # type: str - self.linked_service_name = kwargs.get('linked_service_name', None) - self.policy = kwargs.get('policy', None) + super(ExportSettings, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.type = 'ExportSettings' # type: str -class AzureDataExplorerCommandActivity(ExecutionActivity): - """Azure Data Explorer command activity. +class AzureDatabricksDeltaLakeExportCommand(ExportSettings): + """Azure Databricks Delta Lake export command settings. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. + :param type: Required. The export setting type.Constant filled by server. :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~data_factory_management_client.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~data_factory_management_client.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference - :param policy: Activity policy. 
- :type policy: ~data_factory_management_client.models.ActivityPolicy - :param command: Required. A control command, according to the Azure Data Explorer command - syntax. Type: string (or Expression with resultType string). - :type command: object - :param command_timeout: Control command timeout. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))..). - :type command_timeout: object + :param date_format: Specify the date format for the csv in Azure Databricks Delta Lake Copy. + Type: string (or Expression with resultType string). + :type date_format: object + :param timestamp_format: Specify the timestamp format for the csv in Azure Databricks Delta + Lake Copy. Type: string (or Expression with resultType string). + :type timestamp_format: object """ _validation = { - 'name': {'required': True}, 'type': {'required': True}, - 'command': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'command': {'key': 'typeProperties.command', 'type': 'object'}, - 'command_timeout': {'key': 'typeProperties.commandTimeout', 'type': 'object'}, + 'date_format': {'key': 'dateFormat', 'type': 'object'}, + 'timestamp_format': {'key': 'timestampFormat', 'type': 'object'}, } def __init__( self, **kwargs ): - super(AzureDataExplorerCommandActivity, self).__init__(**kwargs) - self.type = 'AzureDataExplorerCommand' # type: str - self.command = kwargs['command'] - self.command_timeout = kwargs.get('command_timeout', None) + super(AzureDatabricksDeltaLakeExportCommand, self).__init__(**kwargs) + self.type = 'AzureDatabricksDeltaLakeExportCommand' # type: str + self.date_format = kwargs.get('date_format', None) + self.timestamp_format = kwargs.get('timestamp_format', None) -class AzureDataExplorerLinkedService(LinkedService): - """Azure Data Explorer (Kusto) linked service. +class ImportSettings(msrest.serialization.Model): + """Import command settings. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AzureDatabricksDeltaLakeImportCommand, SnowflakeImportCopyCommand. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. The import setting type.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param endpoint: Required. The endpoint of Azure Data Explorer (the engine's endpoint). 
URL - will be in the format https://:code:``.:code:``.kusto.windows.net. - Type: string (or Expression with resultType string). - :type endpoint: object - :param service_principal_id: Required. The ID of the service principal used to authenticate - against Azure Data Explorer. Type: string (or Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: Required. The key of the service principal used to authenticate - against Kusto. - :type service_principal_key: ~data_factory_management_client.models.SecretBase - :param database: Required. Database name for connection. Type: string (or Expression with - resultType string). - :type database: object - :param tenant: Required. The name or ID of the tenant to which the service principal belongs. - Type: string (or Expression with resultType string). - :type tenant: object """ _validation = { 'type': {'required': True}, - 'endpoint': {'required': True}, - 'service_principal_id': {'required': True}, - 'service_principal_key': {'required': True}, - 'database': {'required': True}, - 'tenant': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'database': {'key': 'typeProperties.database', 'type': 'object'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + } + + _subtype_map = { + 'type': {'AzureDatabricksDeltaLakeImportCommand': 'AzureDatabricksDeltaLakeImportCommand', 'SnowflakeImportCopyCommand': 'SnowflakeImportCopyCommand'} } def __init__( self, **kwargs ): - super(AzureDataExplorerLinkedService, self).__init__(**kwargs) - self.type = 'AzureDataExplorer' # type: str - self.endpoint = kwargs['endpoint'] - self.service_principal_id = kwargs['service_principal_id'] - self.service_principal_key = kwargs['service_principal_key'] - self.database = kwargs['database'] - self.tenant = kwargs['tenant'] - - + super(ImportSettings, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.type = 'ImportSettings' # type: str + + +class AzureDatabricksDeltaLakeImportCommand(ImportSettings): + """Azure Databricks Delta Lake import command settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The import setting type.Constant filled by server. + :type type: str + :param date_format: Specify the date format for csv in Azure Databricks Delta Lake Copy. Type: + string (or Expression with resultType string). + :type date_format: object + :param timestamp_format: Specify the timestamp format for csv in Azure Databricks Delta Lake + Copy. Type: string (or Expression with resultType string). 
+ :type timestamp_format: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'date_format': {'key': 'dateFormat', 'type': 'object'}, + 'timestamp_format': {'key': 'timestampFormat', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(AzureDatabricksDeltaLakeImportCommand, self).__init__(**kwargs) + self.type = 'AzureDatabricksDeltaLakeImportCommand' # type: str + self.date_format = kwargs.get('date_format', None) + self.timestamp_format = kwargs.get('timestamp_format', None) + + +class AzureDatabricksDeltaLakeLinkedService(LinkedService): + """Azure Databricks Delta Lake linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param domain: Required. :code:``.azuredatabricks.net, domain name of your Databricks + deployment. Type: string (or Expression with resultType string). + :type domain: object + :param access_token: Access token for databricks REST API. Refer to + https://docs.azuredatabricks.net/api/latest/authentication.html. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type access_token: ~data_factory_management_client.models.SecretBase + :param cluster_id: The id of an existing interactive cluster that will be used for all runs of + this job. Type: string (or Expression with resultType string). + :type cluster_id: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'domain': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'domain': {'key': 'typeProperties.domain', 'type': 'object'}, + 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, + 'cluster_id': {'key': 'typeProperties.clusterId', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(AzureDatabricksDeltaLakeLinkedService, self).__init__(**kwargs) + self.type = 'AzureDatabricksDeltaLake' # type: str + self.domain = kwargs['domain'] + self.access_token = kwargs.get('access_token', None) + self.cluster_id = kwargs.get('cluster_id', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + + +class AzureDatabricksDeltaLakeSink(CopySink): + """A copy activity Azure Databricks Delta Lake sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType + string). + :type pre_copy_script: object + :param import_settings: Azure Databricks Delta Lake import settings. 
+ :type import_settings: + ~data_factory_management_client.models.AzureDatabricksDeltaLakeImportCommand + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + 'import_settings': {'key': 'importSettings', 'type': 'AzureDatabricksDeltaLakeImportCommand'}, + } + + def __init__( + self, + **kwargs + ): + super(AzureDatabricksDeltaLakeSink, self).__init__(**kwargs) + self.type = 'AzureDatabricksDeltaLakeSink' # type: str + self.pre_copy_script = kwargs.get('pre_copy_script', None) + self.import_settings = kwargs.get('import_settings', None) + + +class AzureDatabricksDeltaLakeSource(CopySource): + """A copy activity Azure Databricks Delta Lake source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query: Azure Databricks Delta Lake Sql query. Type: string (or Expression with + resultType string). + :type query: object + :param export_settings: Azure Databricks Delta Lake export settings. + :type export_settings: + ~data_factory_management_client.models.AzureDatabricksDeltaLakeExportCommand + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + 'export_settings': {'key': 'exportSettings', 'type': 'AzureDatabricksDeltaLakeExportCommand'}, + } + + def __init__( + self, + **kwargs + ): + super(AzureDatabricksDeltaLakeSource, self).__init__(**kwargs) + self.type = 'AzureDatabricksDeltaLakeSource' # type: str + self.query = kwargs.get('query', None) + self.export_settings = kwargs.get('export_settings', None) + + +class AzureDatabricksLinkedService(LinkedService): + """Azure Databricks linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param domain: Required. :code:``.azuredatabricks.net, domain name of your Databricks + deployment. Type: string (or Expression with resultType string). + :type domain: object + :param access_token: Required. Access token for databricks REST API. Refer to + https://docs.azuredatabricks.net/api/latest/authentication.html. Type: string (or Expression + with resultType string). + :type access_token: ~data_factory_management_client.models.SecretBase + :param existing_cluster_id: The id of an existing interactive cluster that will be used for all + runs of this activity. Type: string (or Expression with resultType string). + :type existing_cluster_id: object + :param instance_pool_id: The id of an existing instance pool that will be used for all runs of + this activity. Type: string (or Expression with resultType string). + :type instance_pool_id: object + :param new_cluster_version: If not using an existing interactive cluster, this specifies the + Spark version of a new job cluster or instance pool nodes created for each run of this + activity. Required if instancePoolId is specified. Type: string (or Expression with resultType + string). + :type new_cluster_version: object + :param new_cluster_num_of_worker: If not using an existing interactive cluster, this specifies + the number of worker nodes to use for the new job cluster or instance pool. For new job + clusters, this a string-formatted Int32, like '1' means numOfWorker is 1 or '1:10' means auto- + scale from 1 (min) to 10 (max). For instance pools, this is a string-formatted Int32, and can + only specify a fixed number of worker nodes, such as '2'. Required if newClusterVersion is + specified. Type: string (or Expression with resultType string). + :type new_cluster_num_of_worker: object + :param new_cluster_node_type: The node type of the new job cluster. This property is required + if newClusterVersion is specified and instancePoolId is not specified. If instancePoolId is + specified, this property is ignored. Type: string (or Expression with resultType string). + :type new_cluster_node_type: object + :param new_cluster_spark_conf: A set of optional, user-specified Spark configuration key-value + pairs. + :type new_cluster_spark_conf: dict[str, object] + :param new_cluster_spark_env_vars: A set of optional, user-specified Spark environment + variables key-value pairs. + :type new_cluster_spark_env_vars: dict[str, object] + :param new_cluster_custom_tags: Additional tags for cluster resources. This property is ignored + in instance pool configurations. + :type new_cluster_custom_tags: dict[str, object] + :param new_cluster_log_destination: Specify a location to deliver Spark driver, worker, and + event logs. Type: string (or Expression with resultType string). + :type new_cluster_log_destination: object + :param new_cluster_driver_node_type: The driver node type for the new job cluster. 
This + property is ignored in instance pool configurations. Type: string (or Expression with + resultType string). + :type new_cluster_driver_node_type: object + :param new_cluster_init_scripts: User-defined initialization scripts for the new cluster. Type: + array of strings (or Expression with resultType array of strings). + :type new_cluster_init_scripts: object + :param new_cluster_enable_elastic_disk: Enable the elastic disk on the new cluster. This + property is now ignored, and takes the default elastic disk behavior in Databricks (elastic + disks are always enabled). Type: boolean (or Expression with resultType boolean). + :type new_cluster_enable_elastic_disk: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'domain': {'required': True}, + 'access_token': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'domain': {'key': 'typeProperties.domain', 'type': 'object'}, + 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, + 'existing_cluster_id': {'key': 'typeProperties.existingClusterId', 'type': 'object'}, + 'instance_pool_id': {'key': 'typeProperties.instancePoolId', 'type': 'object'}, + 'new_cluster_version': {'key': 'typeProperties.newClusterVersion', 'type': 'object'}, + 'new_cluster_num_of_worker': {'key': 'typeProperties.newClusterNumOfWorker', 'type': 'object'}, + 'new_cluster_node_type': {'key': 'typeProperties.newClusterNodeType', 'type': 'object'}, + 'new_cluster_spark_conf': {'key': 'typeProperties.newClusterSparkConf', 'type': '{object}'}, + 'new_cluster_spark_env_vars': {'key': 'typeProperties.newClusterSparkEnvVars', 'type': '{object}'}, + 'new_cluster_custom_tags': {'key': 'typeProperties.newClusterCustomTags', 'type': '{object}'}, + 'new_cluster_log_destination': {'key': 'typeProperties.newClusterLogDestination', 'type': 'object'}, + 'new_cluster_driver_node_type': {'key': 'typeProperties.newClusterDriverNodeType', 'type': 'object'}, + 'new_cluster_init_scripts': {'key': 'typeProperties.newClusterInitScripts', 'type': 'object'}, + 'new_cluster_enable_elastic_disk': {'key': 'typeProperties.newClusterEnableElasticDisk', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(AzureDatabricksLinkedService, self).__init__(**kwargs) + self.type = 'AzureDatabricks' # type: str + self.domain = kwargs['domain'] + self.access_token = kwargs['access_token'] + self.existing_cluster_id = kwargs.get('existing_cluster_id', None) + self.instance_pool_id = kwargs.get('instance_pool_id', None) + self.new_cluster_version = kwargs.get('new_cluster_version', None) + self.new_cluster_num_of_worker = kwargs.get('new_cluster_num_of_worker', None) + self.new_cluster_node_type = kwargs.get('new_cluster_node_type', None) + self.new_cluster_spark_conf = kwargs.get('new_cluster_spark_conf', None) + self.new_cluster_spark_env_vars = 
kwargs.get('new_cluster_spark_env_vars', None) + self.new_cluster_custom_tags = kwargs.get('new_cluster_custom_tags', None) + self.new_cluster_log_destination = kwargs.get('new_cluster_log_destination', None) + self.new_cluster_driver_node_type = kwargs.get('new_cluster_driver_node_type', None) + self.new_cluster_init_scripts = kwargs.get('new_cluster_init_scripts', None) + self.new_cluster_enable_elastic_disk = kwargs.get('new_cluster_enable_elastic_disk', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + + +class ExecutionActivity(Activity): + """Base class for all execution activities. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AzureDataExplorerCommandActivity, AzureFunctionActivity, AzureMlBatchExecutionActivity, AzureMlExecutePipelineActivity, AzureMlUpdateResourceActivity, CopyActivity, CustomActivity, DataLakeAnalyticsUsqlActivity, DatabricksNotebookActivity, DatabricksSparkJarActivity, DatabricksSparkPythonActivity, DeleteActivity, ExecuteDataFlowActivity, ExecuteSsisPackageActivity, GetMetadataActivity, HdInsightHiveActivity, HdInsightMapReduceActivity, HdInsightPigActivity, HdInsightSparkActivity, HdInsightStreamingActivity, LookupActivity, SQLServerStoredProcedureActivity, WebActivity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. + :type type: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~data_factory_management_client.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~data_factory_management_client.models.UserProperty] + :param linked_service_name: Linked service reference. + :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :param policy: Activity policy. 
+ :type policy: ~data_factory_management_client.models.ActivityPolicy + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + } + + _subtype_map = { + 'type': {'AzureDataExplorerCommand': 'AzureDataExplorerCommandActivity', 'AzureFunctionActivity': 'AzureFunctionActivity', 'AzureMLBatchExecution': 'AzureMlBatchExecutionActivity', 'AzureMLExecutePipeline': 'AzureMlExecutePipelineActivity', 'AzureMLUpdateResource': 'AzureMlUpdateResourceActivity', 'Copy': 'CopyActivity', 'Custom': 'CustomActivity', 'DataLakeAnalyticsU-SQL': 'DataLakeAnalyticsUsqlActivity', 'DatabricksNotebook': 'DatabricksNotebookActivity', 'DatabricksSparkJar': 'DatabricksSparkJarActivity', 'DatabricksSparkPython': 'DatabricksSparkPythonActivity', 'Delete': 'DeleteActivity', 'ExecuteDataFlow': 'ExecuteDataFlowActivity', 'ExecuteSSISPackage': 'ExecuteSsisPackageActivity', 'GetMetadata': 'GetMetadataActivity', 'HDInsightHive': 'HdInsightHiveActivity', 'HDInsightMapReduce': 'HdInsightMapReduceActivity', 'HDInsightPig': 'HdInsightPigActivity', 'HDInsightSpark': 'HdInsightSparkActivity', 'HDInsightStreaming': 'HdInsightStreamingActivity', 'Lookup': 'LookupActivity', 'SqlServerStoredProcedure': 'SQLServerStoredProcedureActivity', 'WebActivity': 'WebActivity'} + } + + def __init__( + self, + **kwargs + ): + super(ExecutionActivity, self).__init__(**kwargs) + self.type = 'Execution' # type: str + self.linked_service_name = kwargs.get('linked_service_name', None) + self.policy = kwargs.get('policy', None) + + +class AzureDataExplorerCommandActivity(ExecutionActivity): + """Azure Data Explorer command activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. + :type type: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~data_factory_management_client.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~data_factory_management_client.models.UserProperty] + :param linked_service_name: Linked service reference. + :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~data_factory_management_client.models.ActivityPolicy + :param command: Required. A control command, according to the Azure Data Explorer command + syntax. Type: string (or Expression with resultType string). + :type command: object + :param command_timeout: Control command timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))..). 
+ :type command_timeout: object + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'command': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'command': {'key': 'typeProperties.command', 'type': 'object'}, + 'command_timeout': {'key': 'typeProperties.commandTimeout', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(AzureDataExplorerCommandActivity, self).__init__(**kwargs) + self.type = 'AzureDataExplorerCommand' # type: str + self.command = kwargs['command'] + self.command_timeout = kwargs.get('command_timeout', None) + + +class AzureDataExplorerLinkedService(LinkedService): + """Azure Data Explorer (Kusto) linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param endpoint: Required. The endpoint of Azure Data Explorer (the engine's endpoint). URL + will be in the format https://:code:``.:code:``.kusto.windows.net. + Type: string (or Expression with resultType string). + :type endpoint: object + :param service_principal_id: Required. The ID of the service principal used to authenticate + against Azure Data Explorer. Type: string (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: Required. The key of the service principal used to authenticate + against Kusto. + :type service_principal_key: ~data_factory_management_client.models.SecretBase + :param database: Required. Database name for connection. Type: string (or Expression with + resultType string). + :type database: object + :param tenant: Required. The name or ID of the tenant to which the service principal belongs. + Type: string (or Expression with resultType string). 
+ :type tenant: object + """ + + _validation = { + 'type': {'required': True}, + 'endpoint': {'required': True}, + 'service_principal_id': {'required': True}, + 'service_principal_key': {'required': True}, + 'database': {'required': True}, + 'tenant': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(AzureDataExplorerLinkedService, self).__init__(**kwargs) + self.type = 'AzureDataExplorer' # type: str + self.endpoint = kwargs['endpoint'] + self.service_principal_id = kwargs['service_principal_id'] + self.service_principal_key = kwargs['service_principal_key'] + self.database = kwargs['database'] + self.tenant = kwargs['tenant'] + + class AzureDataExplorerSink(CopySink): """A copy activity Azure Data Explorer sink. @@ -7699,7 +8108,7 @@ class CompressionReadSettings(msrest.serialization.Model): """Compression read settings. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: ZipDeflateReadSettings. + sub-classes are: TarGZipReadSettings, TarReadSettings, ZipDeflateReadSettings. All required parameters must be populated in order to send to Azure. @@ -7720,7 +8129,7 @@ class CompressionReadSettings(msrest.serialization.Model): } _subtype_map = { - 'type': {'ZipDeflateReadSettings': 'ZipDeflateReadSettings'} + 'type': {'TarGZipReadSettings': 'TarGZipReadSettings', 'TarReadSettings': 'TarReadSettings', 'ZipDeflateReadSettings': 'ZipDeflateReadSettings'} } def __init__( @@ -7750,6 +8159,9 @@ class ConcurLinkedService(LinkedService): :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] + :param connection_properties: Properties used to connect to Concur. It is mutually exclusive + with any other properties in the linked service. Type: object. + :type connection_properties: object :param client_id: Required. Application client_id supplied by Concur App Management. :type client_id: object :param username: Required. The user name that you use to access Concur Service. 
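A minimal usage sketch for the new connectionProperties bag on ConcurLinkedService described above (not part of the generated diff), assuming the kwargs-based models defined in this file and the data_factory_management_client.models import path referenced in the docstrings:

from data_factory_management_client.models import ConcurLinkedService  # import path assumed from the docstrings

# Individual properties; client_id and username are required kwargs of the model.
concur_ls = ConcurLinkedService(
    client_id='concur-app-client-id',
    username='user@contoso.com',
)

# Alternatively, one opaque property bag (mutually exclusive with the individual
# properties per the docstring); note the model itself still requires client_id
# and username even when connectionProperties is supplied. Bag keys are illustrative.
concur_ls_bag = ConcurLinkedService(
    client_id='concur-app-client-id',
    username='user@contoso.com',
    connection_properties={'clientId': 'concur-app-client-id', 'username': 'user@contoso.com'},
)

assert concur_ls.type == 'Concur'  # discriminator is filled by __init__, not by the caller
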
@@ -7786,6 +8198,7 @@ class ConcurLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'connection_properties': {'key': 'typeProperties.connectionProperties', 'type': 'object'}, 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, 'username': {'key': 'typeProperties.username', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, @@ -7801,6 +8214,7 @@ def __init__( ): super(ConcurLinkedService, self).__init__(**kwargs) self.type = 'Concur' # type: str + self.connection_properties = kwargs.get('connection_properties', None) self.client_id = kwargs['client_id'] self.username = kwargs['username'] self.password = kwargs.get('password', None) @@ -8050,9 +8464,11 @@ class CopyActivity(ExecutionActivity): EnableSkipIncompatibleRow is true. :type redirect_incompatible_row_settings: ~data_factory_management_client.models.RedirectIncompatibleRowSettings - :param log_storage_settings: Log storage settings customer need to provide when enabling - session log. + :param log_storage_settings: (Deprecated. Please use LogSettings) Log storage settings customer + need to provide when enabling session log. :type log_storage_settings: ~data_factory_management_client.models.LogStorageSettings + :param log_settings: Log settings customer needs provide when enabling log. + :type log_settings: ~data_factory_management_client.models.LogSettings :param preserve_rules: Preserve Rules. :type preserve_rules: list[object] :param preserve: Preserve rules. @@ -8092,6 +8508,7 @@ class CopyActivity(ExecutionActivity): 'enable_skip_incompatible_row': {'key': 'typeProperties.enableSkipIncompatibleRow', 'type': 'object'}, 'redirect_incompatible_row_settings': {'key': 'typeProperties.redirectIncompatibleRowSettings', 'type': 'RedirectIncompatibleRowSettings'}, 'log_storage_settings': {'key': 'typeProperties.logStorageSettings', 'type': 'LogStorageSettings'}, + 'log_settings': {'key': 'typeProperties.logSettings', 'type': 'LogSettings'}, 'preserve_rules': {'key': 'typeProperties.preserveRules', 'type': '[object]'}, 'preserve': {'key': 'typeProperties.preserve', 'type': '[object]'}, 'validate_data_consistency': {'key': 'typeProperties.validateDataConsistency', 'type': 'object'}, @@ -8116,12 +8533,38 @@ def __init__( self.enable_skip_incompatible_row = kwargs.get('enable_skip_incompatible_row', None) self.redirect_incompatible_row_settings = kwargs.get('redirect_incompatible_row_settings', None) self.log_storage_settings = kwargs.get('log_storage_settings', None) + self.log_settings = kwargs.get('log_settings', None) self.preserve_rules = kwargs.get('preserve_rules', None) self.preserve = kwargs.get('preserve', None) self.validate_data_consistency = kwargs.get('validate_data_consistency', None) self.skip_error_file = kwargs.get('skip_error_file', None) +class CopyActivityLogSettings(msrest.serialization.Model): + """Settings for copy activity log. + + :param log_level: Gets or sets the log level, support: Info, Warning. Type: string (or + Expression with resultType string). + :type log_level: object + :param enable_reliable_logging: Specifies whether to enable reliable logging. Type: boolean (or + Expression with resultType boolean). 
+ :type enable_reliable_logging: object + """ + + _attribute_map = { + 'log_level': {'key': 'logLevel', 'type': 'object'}, + 'enable_reliable_logging': {'key': 'enableReliableLogging', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(CopyActivityLogSettings, self).__init__(**kwargs) + self.log_level = kwargs.get('log_level', None) + self.enable_reliable_logging = kwargs.get('enable_reliable_logging', None) + + class CopyTranslator(msrest.serialization.Model): """A copy activity translator. @@ -10063,7 +10506,7 @@ class DatasetCompression(msrest.serialization.Model): """The compression method used on a dataset. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: DatasetBZip2Compression, DatasetDeflateCompression, DatasetGZipCompression, DatasetZipDeflateCompression. + sub-classes are: DatasetBZip2Compression, DatasetDeflateCompression, DatasetGZipCompression, DatasetTarCompression, DatasetTarGZipCompression, DatasetZipDeflateCompression. All required parameters must be populated in order to send to Azure. @@ -10084,7 +10527,7 @@ class DatasetCompression(msrest.serialization.Model): } _subtype_map = { - 'type': {'BZip2': 'DatasetBZip2Compression', 'Deflate': 'DatasetDeflateCompression', 'GZip': 'DatasetGZipCompression', 'ZipDeflate': 'DatasetZipDeflateCompression'} + 'type': {'BZip2': 'DatasetBZip2Compression', 'Deflate': 'DatasetDeflateCompression', 'GZip': 'DatasetGZipCompression', 'Tar': 'DatasetTarCompression', 'TarGZip': 'DatasetTarGZipCompression', 'ZipDeflate': 'DatasetZipDeflateCompression'} } def __init__( @@ -10398,6 +10841,68 @@ def __init__( self.type = kwargs.get('type', None) +class DatasetTarCompression(DatasetCompression): + """The Tar archive method used on a dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset compression.Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(DatasetTarCompression, self).__init__(**kwargs) + self.type = 'Tar' # type: str + + +class DatasetTarGZipCompression(DatasetCompression): + """The TarGZip compression method used on a dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset compression.Constant filled by server. + :type type: str + :param level: The TarGZip compression level. Possible values include: "Optimal", "Fastest". 
+ :type level: str or ~data_factory_management_client.models.DatasetCompressionLevel + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(DatasetTarGZipCompression, self).__init__(**kwargs) + self.type = 'TarGZip' # type: str + self.level = kwargs.get('level', None) + + class DatasetZipDeflateCompression(DatasetCompression): """The ZipDeflate compression method used on a dataset. @@ -10778,7 +11283,7 @@ class DelimitedTextDataset(Dataset): resultType string). :type encoding_name: object :param compression_codec: Possible values include: "none", "gzip", "snappy", "lzo", "bzip2", - "deflate", "zipDeflate", "lz4". + "deflate", "zipDeflate", "lz4", "tar", "tarGZip". :type compression_codec: str or ~data_factory_management_client.models.CompressionCodec :param compression_level: The data compression method used for DelimitedText. Possible values include: "Optimal", "Fastest". @@ -11005,6 +11510,13 @@ class DelimitedTextWriteSettings(FormatWriteSettings): :param file_extension: Required. The file extension used to create the files. Type: string (or Expression with resultType string). :type file_extension: object + :param max_rows_per_file: Limit the written file's row count to be smaller than or equal to the + specified count. Type: integer (or Expression with resultType integer). + :type max_rows_per_file: object + :param file_name_prefix: Specifies the file name pattern + :code:``_:code:``.:code:`` when copy from non-file + based store without partitionOptions. Type: string (or Expression with resultType string). + :type file_name_prefix: object """ _validation = { @@ -11017,6 +11529,8 @@ class DelimitedTextWriteSettings(FormatWriteSettings): 'type': {'key': 'type', 'type': 'str'}, 'quote_all_text': {'key': 'quoteAllText', 'type': 'object'}, 'file_extension': {'key': 'fileExtension', 'type': 'object'}, + 'max_rows_per_file': {'key': 'maxRowsPerFile', 'type': 'object'}, + 'file_name_prefix': {'key': 'fileNamePrefix', 'type': 'object'}, } def __init__( @@ -11027,6 +11541,8 @@ def __init__( self.type = 'DelimitedTextWriteSettings' # type: str self.quote_all_text = kwargs.get('quote_all_text', None) self.file_extension = kwargs['file_extension'] + self.max_rows_per_file = kwargs.get('max_rows_per_file', None) + self.file_name_prefix = kwargs.get('file_name_prefix', None) class DependencyReference(msrest.serialization.Model): @@ -12744,16 +13260,17 @@ class ExecuteDataFlowActivityTypePropertiesCompute(msrest.serialization.Model): """Compute properties for data flow activity. :param compute_type: Compute type of the cluster which will execute data flow job. Possible - values include: "General", "MemoryOptimized", "ComputeOptimized". - :type compute_type: str or ~data_factory_management_client.models.DataFlowComputeType + values include: 'General', 'MemoryOptimized', 'ComputeOptimized'. Type: string (or Expression + with resultType string). + :type compute_type: object :param core_count: Core count of the cluster which will execute data flow job. Supported values - are: 8, 16, 32, 48, 80, 144 and 272. - :type core_count: int + are: 8, 16, 32, 48, 80, 144 and 272. Type: integer (or Expression with resultType integer). 
+ :type core_count: object """ _attribute_map = { - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'core_count': {'key': 'coreCount', 'type': 'int'}, + 'compute_type': {'key': 'computeType', 'type': 'object'}, + 'core_count': {'key': 'coreCount', 'type': 'object'}, } def __init__( @@ -12927,43 +13444,6 @@ def __init__( self.log_location = kwargs.get('log_location', None) -class ExportSettings(msrest.serialization.Model): - """Export command settings. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: SnowflakeExportCopyCommand. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. The export setting type.Constant filled by server. - :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'SnowflakeExportCopyCommand': 'SnowflakeExportCopyCommand'} - } - - def __init__( - self, - **kwargs - ): - super(ExportSettings, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.type = 'ExportSettings' # type: str - - class ExposureControlBatchRequest(msrest.serialization.Model): """A list of exposure control features. @@ -13183,6 +13663,9 @@ class Factory(Resource): :param global_parameters: List of parameters for factory. :type global_parameters: dict[str, ~data_factory_management_client.models.GlobalParameterSpecification] + :param public_network_access: Whether or not public network access is allowed for the data + factory. Possible values include: "Enabled", "Disabled". + :type public_network_access: str or ~data_factory_management_client.models.PublicNetworkAccess """ _validation = { @@ -13209,6 +13692,7 @@ class Factory(Resource): 'version': {'key': 'properties.version', 'type': 'str'}, 'repo_configuration': {'key': 'properties.repoConfiguration', 'type': 'FactoryRepoConfiguration'}, 'global_parameters': {'key': 'properties.globalParameters', 'type': '{GlobalParameterSpecification}'}, + 'public_network_access': {'key': 'properties.publicNetworkAccess', 'type': 'str'}, } def __init__( @@ -13223,6 +13707,7 @@ def __init__( self.version = None self.repo_configuration = kwargs.get('repo_configuration', None) self.global_parameters = kwargs.get('global_parameters', None) + self.public_network_access = kwargs.get('public_network_access', None) class FactoryRepoConfiguration(msrest.serialization.Model): @@ -17446,43 +17931,6 @@ def __init__( self.query = kwargs.get('query', None) -class ImportSettings(msrest.serialization.Model): - """Import command settings. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: SnowflakeImportCopyCommand. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. The import setting type.Constant filled by server. 
- :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'SnowflakeImportCopyCommand': 'SnowflakeImportCopyCommand'} - } - - def __init__( - self, - **kwargs - ): - super(ImportSettings, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.type = 'ImportSettings' # type: str - - class InformixLinkedService(LinkedService): """Informix linked service. @@ -19269,62 +19717,128 @@ class LinkedServiceReference(msrest.serialization.Model): 'parameters': {'key': 'parameters', 'type': '{object}'}, } - type = "LinkedServiceReference" - + type = "LinkedServiceReference" + + def __init__( + self, + **kwargs + ): + super(LinkedServiceReference, self).__init__(**kwargs) + self.reference_name = kwargs['reference_name'] + self.parameters = kwargs.get('parameters', None) + + +class LinkedServiceResource(SubResource): + """Linked service resource type. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :ivar etag: Etag identifies change in the resource. + :vartype etag: str + :param properties: Required. Properties of linked service. + :type properties: ~data_factory_management_client.models.LinkedService + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'LinkedService'}, + } + + def __init__( + self, + **kwargs + ): + super(LinkedServiceResource, self).__init__(**kwargs) + self.properties = kwargs['properties'] + + +class LogLocationSettings(msrest.serialization.Model): + """Log location settings. + + All required parameters must be populated in order to send to Azure. + + :param linked_service_name: Required. Log storage linked service reference. + :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :param path: The path to storage for storing detailed logs of activity execution. Type: string + (or Expression with resultType string). + :type path: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'path': {'key': 'path', 'type': 'object'}, + } + def __init__( self, **kwargs ): - super(LinkedServiceReference, self).__init__(**kwargs) - self.reference_name = kwargs['reference_name'] - self.parameters = kwargs.get('parameters', None) - + super(LogLocationSettings, self).__init__(**kwargs) + self.linked_service_name = kwargs['linked_service_name'] + self.path = kwargs.get('path', None) -class LinkedServiceResource(SubResource): - """Linked service resource type. - Variables are only populated by the server, and will be ignored when sending a request. +class LogSettings(msrest.serialization.Model): + """Log settings. 
All required parameters must be populated in order to send to Azure. - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. - :vartype type: str - :ivar etag: Etag identifies change in the resource. - :vartype etag: str - :param properties: Required. Properties of linked service. - :type properties: ~data_factory_management_client.models.LinkedService + :param enable_copy_activity_log: Specifies whether to enable copy activity log. Type: boolean + (or Expression with resultType boolean). + :type enable_copy_activity_log: object + :param copy_activity_log_settings: Specifies settings for copy activity log. + :type copy_activity_log_settings: + ~data_factory_management_client.models.CopyActivityLogSettings + :param log_location_settings: Required. Log location settings customer needs to provide when + enabling log. + :type log_location_settings: ~data_factory_management_client.models.LogLocationSettings """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, - 'properties': {'required': True}, + 'log_location_settings': {'required': True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'LinkedService'}, + 'enable_copy_activity_log': {'key': 'enableCopyActivityLog', 'type': 'object'}, + 'copy_activity_log_settings': {'key': 'copyActivityLogSettings', 'type': 'CopyActivityLogSettings'}, + 'log_location_settings': {'key': 'logLocationSettings', 'type': 'LogLocationSettings'}, } def __init__( self, **kwargs ): - super(LinkedServiceResource, self).__init__(**kwargs) - self.properties = kwargs['properties'] + super(LogSettings, self).__init__(**kwargs) + self.enable_copy_activity_log = kwargs.get('enable_copy_activity_log', None) + self.copy_activity_log_settings = kwargs.get('copy_activity_log_settings', None) + self.log_location_settings = kwargs['log_location_settings'] class LogStorageSettings(msrest.serialization.Model): - """Log storage settings. + """(Deprecated. Please use LogSettings) Log storage settings. All required parameters must be populated in order to send to Azure. @@ -20588,72 +21102,295 @@ def __init__( self, **kwargs ): - super(MicrosoftAccessLinkedService, self).__init__(**kwargs) - self.type = 'MicrosoftAccess' # type: str - self.connection_string = kwargs['connection_string'] - self.authentication_type = kwargs.get('authentication_type', None) - self.credential = kwargs.get('credential', None) - self.user_name = kwargs.get('user_name', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + super(MicrosoftAccessLinkedService, self).__init__(**kwargs) + self.type = 'MicrosoftAccess' # type: str + self.connection_string = kwargs['connection_string'] + self.authentication_type = kwargs.get('authentication_type', None) + self.credential = kwargs.get('credential', None) + self.user_name = kwargs.get('user_name', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + + +class MicrosoftAccessSink(CopySink): + """A copy activity Microsoft Access sink. + + All required parameters must be populated in order to send to Azure. 
+ + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param pre_copy_script: A query to execute before starting the copy. Type: string (or + Expression with resultType string). + :type pre_copy_script: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(MicrosoftAccessSink, self).__init__(**kwargs) + self.type = 'MicrosoftAccessSink' # type: str + self.pre_copy_script = kwargs.get('pre_copy_script', None) + + +class MicrosoftAccessSource(CopySource): + """A copy activity source for Microsoft Access. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query: Database query. Type: string (or Expression with resultType string). + :type query: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). 
+ :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + } + + def __init__( + self, + **kwargs + ): + super(MicrosoftAccessSource, self).__init__(**kwargs) + self.type = 'MicrosoftAccessSource' # type: str + self.query = kwargs.get('query', None) + self.additional_columns = kwargs.get('additional_columns', None) + + +class MicrosoftAccessTableDataset(Dataset): + """The Microsoft Access table dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~data_factory_management_client.models.DatasetFolder + :param table_name: The Microsoft Access table name. Type: string (or Expression with resultType + string). + :type table_name: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(MicrosoftAccessTableDataset, self).__init__(**kwargs) + self.type = 'MicrosoftAccessTable' # type: str + self.table_name = kwargs.get('table_name', None) + + +class MongoDBAtlasCollectionDataset(Dataset): + """The MongoDB Atlas database dataset. 
+ + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~data_factory_management_client.models.DatasetFolder + :param collection: Required. The collection name of the MongoDB Atlas database. Type: string + (or Expression with resultType string). + :type collection: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + 'collection': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'collection': {'key': 'typeProperties.collection', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(MongoDBAtlasCollectionDataset, self).__init__(**kwargs) + self.type = 'MongoDbAtlasCollection' # type: str + self.collection = kwargs['collection'] -class MicrosoftAccessSink(CopySink): - """A copy activity Microsoft Access sink. +class MongoDBAtlasLinkedService(LinkedService): + """Linked service for MongoDB Atlas data source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Copy sink type.Constant filled by server. + :param type: Required. Type of linked service.Constant filled by server. :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType - integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType - integer). 
- :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the sink data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param pre_copy_script: A query to execute before starting the copy. Type: string (or - Expression with resultType string). - :type pre_copy_script: object + :param connect_via: The integration runtime reference. + :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param connection_string: Required. The MongoDB Atlas connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type connection_string: object + :param database: Required. The name of the MongoDB Atlas database that you want to access. + Type: string (or Expression with resultType string). + :type database: object """ _validation = { 'type': {'required': True}, + 'connection_string': {'required': True}, + 'database': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, } def __init__( self, **kwargs ): - super(MicrosoftAccessSink, self).__init__(**kwargs) - self.type = 'MicrosoftAccessSink' # type: str - self.pre_copy_script = kwargs.get('pre_copy_script', None) + super(MongoDBAtlasLinkedService, self).__init__(**kwargs) + self.type = 'MongoDbAtlas' # type: str + self.connection_string = kwargs['connection_string'] + self.database = kwargs['database'] -class MicrosoftAccessSource(CopySource): - """A copy activity source for Microsoft Access. +class MongoDBAtlasSource(CopySource): + """A copy activity source for a MongoDB Atlas database. All required parameters must be populated in order to send to Azure. @@ -20671,8 +21408,20 @@ class MicrosoftAccessSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param query: Database query. 
Type: string (or Expression with resultType string). - :type query: object + :param filter: Specifies selection filter using query operators. To return all documents in a + collection, omit this parameter or pass an empty document ({}). Type: string (or Expression + with resultType string). + :type filter: object + :param cursor_methods: Cursor methods for Mongodb query. + :type cursor_methods: ~data_factory_management_client.models.MongoDBCursorMethodsProperties + :param batch_size: Specifies the number of documents to return in each batch of the response + from MongoDB Atlas instance. In most cases, modifying the batch size will not affect the user + or the application. This property's main purpose is to avoid hit the limitation of response + size. Type: integer (or Expression with resultType integer). + :type batch_size: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] @@ -20688,7 +21437,10 @@ class MicrosoftAccessSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'query': {'key': 'query', 'type': 'object'}, + 'filter': {'key': 'filter', 'type': 'object'}, + 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDBCursorMethodsProperties'}, + 'batch_size': {'key': 'batchSize', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } @@ -20696,71 +21448,15 @@ def __init__( self, **kwargs ): - super(MicrosoftAccessSource, self).__init__(**kwargs) - self.type = 'MicrosoftAccessSource' # type: str - self.query = kwargs.get('query', None) + super(MongoDBAtlasSource, self).__init__(**kwargs) + self.type = 'MongoDbAtlasSource' # type: str + self.filter = kwargs.get('filter', None) + self.cursor_methods = kwargs.get('cursor_methods', None) + self.batch_size = kwargs.get('batch_size', None) + self.query_timeout = kwargs.get('query_timeout', None) self.additional_columns = kwargs.get('additional_columns', None) -class MicrosoftAccessTableDataset(Dataset): - """The Microsoft Access table dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. 
- :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~data_factory_management_client.models.DatasetFolder - :param table_name: The Microsoft Access table name. Type: string (or Expression with resultType - string). - :type table_name: object - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(MicrosoftAccessTableDataset, self).__init__(**kwargs) - self.type = 'MicrosoftAccessTable' # type: str - self.table_name = kwargs.get('table_name', None) - - class MongoDBCollectionDataset(Dataset): """The MongoDB database dataset. @@ -22997,7 +23693,7 @@ class OrcDataset(Dataset): :type folder: ~data_factory_management_client.models.DatasetFolder :param location: The location of the ORC data storage. :type location: ~data_factory_management_client.models.DatasetLocation - :param orc_compression_codec: Possible values include: "none", "zlib", "snappy". + :param orc_compression_codec: Possible values include: "none", "zlib", "snappy", "lzo". :type orc_compression_codec: str or ~data_factory_management_client.models.OrcCompressionCodec """ @@ -23092,6 +23788,8 @@ class OrcSink(CopySink): :type max_concurrent_connections: object :param store_settings: ORC store settings. :type store_settings: ~data_factory_management_client.models.StoreWriteSettings + :param format_settings: ORC format settings. + :type format_settings: ~data_factory_management_client.models.OrcWriteSettings """ _validation = { @@ -23107,6 +23805,7 @@ class OrcSink(CopySink): 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, + 'format_settings': {'key': 'formatSettings', 'type': 'OrcWriteSettings'}, } def __init__( @@ -23116,6 +23815,7 @@ def __init__( super(OrcSink, self).__init__(**kwargs) self.type = 'OrcSink' # type: str self.store_settings = kwargs.get('store_settings', None) + self.format_settings = kwargs.get('format_settings', None) class OrcSource(CopySource): @@ -23168,6 +23868,46 @@ def __init__( self.additional_columns = kwargs.get('additional_columns', None) +class OrcWriteSettings(FormatWriteSettings): + """Orc write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :type additional_properties: dict[str, object] + :param type: Required. The write setting type.Constant filled by server. + :type type: str + :param max_rows_per_file: Limit the written file's row count to be smaller than or equal to the + specified count. Type: integer (or Expression with resultType integer). + :type max_rows_per_file: object + :param file_name_prefix: Specifies the file name pattern + :code:``_:code:``.:code:`` when copy from non-file + based store without partitionOptions. Type: string (or Expression with resultType string). + :type file_name_prefix: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_rows_per_file': {'key': 'maxRowsPerFile', 'type': 'object'}, + 'file_name_prefix': {'key': 'fileNamePrefix', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(OrcWriteSettings, self).__init__(**kwargs) + self.type = 'OrcWriteSettings' # type: str + self.max_rows_per_file = kwargs.get('max_rows_per_file', None) + self.file_name_prefix = kwargs.get('file_name_prefix', None) + + class PackageStore(msrest.serialization.Model): """Package store for the SSIS integration runtime. @@ -23258,7 +23998,7 @@ class ParquetDataset(Dataset): :param location: The location of the parquet storage. :type location: ~data_factory_management_client.models.DatasetLocation :param compression_codec: Possible values include: "none", "gzip", "snappy", "lzo", "bzip2", - "deflate", "zipDeflate", "lz4". + "deflate", "zipDeflate", "lz4", "tar", "tarGZip". :type compression_codec: str or ~data_factory_management_client.models.CompressionCodec """ @@ -23353,6 +24093,8 @@ class ParquetSink(CopySink): :type max_concurrent_connections: object :param store_settings: Parquet store settings. :type store_settings: ~data_factory_management_client.models.StoreWriteSettings + :param format_settings: Parquet format settings. + :type format_settings: ~data_factory_management_client.models.ParquetWriteSettings """ _validation = { @@ -23368,6 +24110,7 @@ class ParquetSink(CopySink): 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, + 'format_settings': {'key': 'formatSettings', 'type': 'ParquetWriteSettings'}, } def __init__( @@ -23377,6 +24120,7 @@ def __init__( super(ParquetSink, self).__init__(**kwargs) self.type = 'ParquetSink' # type: str self.store_settings = kwargs.get('store_settings', None) + self.format_settings = kwargs.get('format_settings', None) class ParquetSource(CopySource): @@ -23429,6 +24173,46 @@ def __init__( self.additional_columns = kwargs.get('additional_columns', None) +class ParquetWriteSettings(FormatWriteSettings): + """Parquet write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The write setting type.Constant filled by server. + :type type: str + :param max_rows_per_file: Limit the written file's row count to be smaller than or equal to the + specified count. Type: integer (or Expression with resultType integer). 
+ :type max_rows_per_file: object + :param file_name_prefix: Specifies the file name pattern + :code:``_:code:``.:code:`` when copy from non-file + based store without partitionOptions. Type: string (or Expression with resultType string). + :type file_name_prefix: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_rows_per_file': {'key': 'maxRowsPerFile', 'type': 'object'}, + 'file_name_prefix': {'key': 'fileNamePrefix', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(ParquetWriteSettings, self).__init__(**kwargs) + self.type = 'ParquetWriteSettings' # type: str + self.max_rows_per_file = kwargs.get('max_rows_per_file', None) + self.file_name_prefix = kwargs.get('file_name_prefix', None) + + class PaypalLinkedService(LinkedService): """Paypal Service linked service. @@ -25537,13 +26321,9 @@ class RestSink(CopySink): :type http_request_timeout: object :param request_interval: The time to await before sending next request, in milliseconds. :type request_interval: object - :param compression_type: Compression Type to Send data in compressed format with Optimal - Compression Level, Default is None. And The Only Supported option is Gzip. - :type compression_type: object - :param wrap_request_json_in_an_object: Wraps Request Array Json into an Object before calling - the rest endpoint , Default is false. ex: if true request content sample format is { rows:[]} - else the format is []. - :type wrap_request_json_in_an_object: object + :param http_compression_type: Http Compression Type to Send data in compressed format with + Optimal Compression Level, Default is None. And The Only Supported option is Gzip. + :type http_compression_type: object """ _validation = { @@ -25562,8 +26342,7 @@ class RestSink(CopySink): 'additional_headers': {'key': 'additionalHeaders', 'type': 'object'}, 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, 'request_interval': {'key': 'requestInterval', 'type': 'object'}, - 'compression_type': {'key': 'compressionType', 'type': 'object'}, - 'wrap_request_json_in_an_object': {'key': 'wrapRequestJsonInAnObject', 'type': 'object'}, + 'http_compression_type': {'key': 'httpCompressionType', 'type': 'object'}, } def __init__( @@ -25576,8 +26355,7 @@ def __init__( self.additional_headers = kwargs.get('additional_headers', None) self.http_request_timeout = kwargs.get('http_request_timeout', None) self.request_interval = kwargs.get('request_interval', None) - self.compression_type = kwargs.get('compression_type', None) - self.wrap_request_json_in_an_object = kwargs.get('wrap_request_json_in_an_object', None) + self.http_compression_type = kwargs.get('http_compression_type', None) class RestSource(CopySource): @@ -31999,6 +32777,74 @@ def __init__( self.type_conversion_settings = kwargs.get('type_conversion_settings', None) +class TarGZipReadSettings(CompressionReadSettings): + """The TarGZip compression read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The Compression setting type.Constant filled by server. + :type type: str + :param preserve_compression_file_name_as_folder: Preserve the compression file name as folder + path. 
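The new OrcWriteSettings and ParquetWriteSettings models above only take effect once they are attached to a copy sink through the new formatSettings property; a minimal sketch, assuming the vendored models package is importable at this path and using made-up values:

from azext_datafactory.vendored_sdks.datafactory.models import (
    ParquetSink,
    ParquetWriteSettings,
)

# Cap each written Parquet file at one million rows; the prefix only applies when
# copying from a non-file-based store without partitionOptions.
settings = ParquetWriteSettings(
    max_rows_per_file=1000000,
    file_name_prefix="extract",  # hypothetical prefix
)
sink = ParquetSink(format_settings=settings)
assert sink.format_settings.type == "ParquetWriteSettings"

OrcSink and OrcWriteSettings follow the same pattern, with the value serialized under the same formatSettings key.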
Type: boolean (or Expression with resultType boolean). + :type preserve_compression_file_name_as_folder: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'preserve_compression_file_name_as_folder': {'key': 'preserveCompressionFileNameAsFolder', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(TarGZipReadSettings, self).__init__(**kwargs) + self.type = 'TarGZipReadSettings' # type: str + self.preserve_compression_file_name_as_folder = kwargs.get('preserve_compression_file_name_as_folder', None) + + +class TarReadSettings(CompressionReadSettings): + """The Tar compression read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The Compression setting type.Constant filled by server. + :type type: str + :param preserve_compression_file_name_as_folder: Preserve the compression file name as folder + path. Type: boolean (or Expression with resultType boolean). + :type preserve_compression_file_name_as_folder: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'preserve_compression_file_name_as_folder': {'key': 'preserveCompressionFileNameAsFolder', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(TarReadSettings, self).__init__(**kwargs) + self.type = 'TarReadSettings' # type: str + self.preserve_compression_file_name_as_folder = kwargs.get('preserve_compression_file_name_as_folder', None) + + class TeradataLinkedService(LinkedService): """Linked service for Teradata data source. @@ -32688,7 +33534,7 @@ class TumblingWindowTrigger(Trigger): trigger window that is ready. :type pipeline: ~data_factory_management_client.models.TriggerPipelineReference :param frequency: Required. The frequency of the time windows. Possible values include: - "Minute", "Hour". + "Minute", "Hour", "Month". :type frequency: str or ~data_factory_management_client.models.TumblingWindowFrequency :param interval: Required. The interval of the time windows. The minimum interval allowed is 15 Minutes. diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models_py3.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models_py3.py index 060634eb408..21008e97f18 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models_py3.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models_py3.py @@ -378,7 +378,7 @@ class LinkedService(msrest.serialization.Model): """The Azure Data Factory nested object which contains the information and credential which can be used to connect with related store or compute resource. You probably want to use the sub-classes and not this class directly. 
Known - sub-classes are: AmazonMwsLinkedService, AmazonRedshiftLinkedService, AmazonS3LinkedService, AzureBatchLinkedService, AzureBlobFsLinkedService, AzureBlobStorageLinkedService, AzureDataExplorerLinkedService, AzureDataLakeAnalyticsLinkedService, AzureDataLakeStoreLinkedService, AzureDatabricksLinkedService, AzureFileStorageLinkedService, AzureFunctionLinkedService, AzureKeyVaultLinkedService, AzureMlLinkedService, AzureMlServiceLinkedService, AzureMariaDBLinkedService, AzureMySQLLinkedService, AzurePostgreSQLLinkedService, AzureSearchLinkedService, AzureSQLDWLinkedService, AzureSQLDatabaseLinkedService, AzureSQLMiLinkedService, AzureStorageLinkedService, AzureTableStorageLinkedService, CassandraLinkedService, CommonDataServiceForAppsLinkedService, ConcurLinkedService, CosmosDBLinkedService, CosmosDBMongoDBApiLinkedService, CouchbaseLinkedService, CustomDataSourceLinkedService, Db2LinkedService, DrillLinkedService, DynamicsLinkedService, DynamicsAxLinkedService, DynamicsCrmLinkedService, EloquaLinkedService, FileServerLinkedService, FtpServerLinkedService, GoogleAdWordsLinkedService, GoogleBigQueryLinkedService, GoogleCloudStorageLinkedService, GreenplumLinkedService, HBaseLinkedService, HdInsightLinkedService, HdInsightOnDemandLinkedService, HdfsLinkedService, HiveLinkedService, HttpLinkedService, HubspotLinkedService, ImpalaLinkedService, InformixLinkedService, JiraLinkedService, MagentoLinkedService, MariaDBLinkedService, MarketoLinkedService, MicrosoftAccessLinkedService, MongoDBLinkedService, MongoDBV2LinkedService, MySQLLinkedService, NetezzaLinkedService, ODataLinkedService, OdbcLinkedService, Office365LinkedService, OracleLinkedService, OracleServiceCloudLinkedService, PaypalLinkedService, PhoenixLinkedService, PostgreSQLLinkedService, PrestoLinkedService, QuickBooksLinkedService, ResponsysLinkedService, RestServiceLinkedService, SalesforceLinkedService, SalesforceMarketingCloudLinkedService, SalesforceServiceCloudLinkedService, SapBwLinkedService, SapCloudForCustomerLinkedService, SapEccLinkedService, SapHanaLinkedService, SapOpenHubLinkedService, SapTableLinkedService, ServiceNowLinkedService, SftpServerLinkedService, SharePointOnlineListLinkedService, ShopifyLinkedService, SnowflakeLinkedService, SparkLinkedService, SQLServerLinkedService, SquareLinkedService, SybaseLinkedService, TeradataLinkedService, VerticaLinkedService, WebLinkedService, XeroLinkedService, ZohoLinkedService. 
+ sub-classes are: AmazonMwsLinkedService, AmazonRedshiftLinkedService, AmazonS3LinkedService, AzureBatchLinkedService, AzureBlobFsLinkedService, AzureBlobStorageLinkedService, AzureDataExplorerLinkedService, AzureDataLakeAnalyticsLinkedService, AzureDataLakeStoreLinkedService, AzureDatabricksLinkedService, AzureDatabricksDeltaLakeLinkedService, AzureFileStorageLinkedService, AzureFunctionLinkedService, AzureKeyVaultLinkedService, AzureMlLinkedService, AzureMlServiceLinkedService, AzureMariaDBLinkedService, AzureMySQLLinkedService, AzurePostgreSQLLinkedService, AzureSearchLinkedService, AzureSQLDWLinkedService, AzureSQLDatabaseLinkedService, AzureSQLMiLinkedService, AzureStorageLinkedService, AzureTableStorageLinkedService, CassandraLinkedService, CommonDataServiceForAppsLinkedService, ConcurLinkedService, CosmosDBLinkedService, CosmosDBMongoDBApiLinkedService, CouchbaseLinkedService, CustomDataSourceLinkedService, Db2LinkedService, DrillLinkedService, DynamicsLinkedService, DynamicsAxLinkedService, DynamicsCrmLinkedService, EloquaLinkedService, FileServerLinkedService, FtpServerLinkedService, GoogleAdWordsLinkedService, GoogleBigQueryLinkedService, GoogleCloudStorageLinkedService, GreenplumLinkedService, HBaseLinkedService, HdInsightLinkedService, HdInsightOnDemandLinkedService, HdfsLinkedService, HiveLinkedService, HttpLinkedService, HubspotLinkedService, ImpalaLinkedService, InformixLinkedService, JiraLinkedService, MagentoLinkedService, MariaDBLinkedService, MarketoLinkedService, MicrosoftAccessLinkedService, MongoDBLinkedService, MongoDBAtlasLinkedService, MongoDBV2LinkedService, MySQLLinkedService, NetezzaLinkedService, ODataLinkedService, OdbcLinkedService, Office365LinkedService, OracleLinkedService, OracleServiceCloudLinkedService, PaypalLinkedService, PhoenixLinkedService, PostgreSQLLinkedService, PrestoLinkedService, QuickBooksLinkedService, ResponsysLinkedService, RestServiceLinkedService, SalesforceLinkedService, SalesforceMarketingCloudLinkedService, SalesforceServiceCloudLinkedService, SapBwLinkedService, SapCloudForCustomerLinkedService, SapEccLinkedService, SapHanaLinkedService, SapOpenHubLinkedService, SapTableLinkedService, ServiceNowLinkedService, SftpServerLinkedService, SharePointOnlineListLinkedService, ShopifyLinkedService, SnowflakeLinkedService, SparkLinkedService, SQLServerLinkedService, SquareLinkedService, SybaseLinkedService, TeradataLinkedService, VerticaLinkedService, WebLinkedService, XeroLinkedService, ZohoLinkedService. All required parameters must be populated in order to send to Azure. 
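The two newly listed subtypes are only reachable through the discriminator entries added to _subtype_map below; a rough round-trip sketch, assuming standard msrest behaviour and an import path that may differ in this vendored layout:

from azext_datafactory.vendored_sdks.datafactory.models import (
    AzureDatabricksDeltaLakeLinkedService,  # model added further down in this file
    LinkedService,
)

# 'domain' is the only required type property of the new linked service.
svc = AzureDatabricksDeltaLakeLinkedService(
    domain="https://adb-1234567890123456.7.azuredatabricks.net",  # hypothetical workspace URL
)
payload = svc.serialize()
assert payload["type"] == "AzureDatabricksDeltaLake"

# On the way back, msrest consults the 'type' discriminator and returns the subclass.
restored = LinkedService.from_dict(payload)
assert isinstance(restored, AzureDatabricksDeltaLakeLinkedService)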
@@ -411,7 +411,7 @@ class LinkedService(msrest.serialization.Model): } _subtype_map = { - 'type': {'AmazonMWS': 'AmazonMwsLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureBlobFS': 'AzureBlobFsLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureDataExplorer': 'AzureDataExplorerLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureFileStorage': 'AzureFileStorageLinkedService', 'AzureFunction': 'AzureFunctionLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureML': 'AzureMlLinkedService', 'AzureMLService': 'AzureMlServiceLinkedService', 'AzureMariaDB': 'AzureMariaDBLinkedService', 'AzureMySql': 'AzureMySQLLinkedService', 'AzurePostgreSql': 'AzurePostgreSQLLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'AzureSqlDW': 'AzureSQLDWLinkedService', 'AzureSqlDatabase': 'AzureSQLDatabaseLinkedService', 'AzureSqlMI': 'AzureSQLMiLinkedService', 'AzureStorage': 'AzureStorageLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'Cassandra': 'CassandraLinkedService', 'CommonDataServiceForApps': 'CommonDataServiceForAppsLinkedService', 'Concur': 'ConcurLinkedService', 'CosmosDb': 'CosmosDBLinkedService', 'CosmosDbMongoDbApi': 'CosmosDBMongoDBApiLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'Db2': 'Db2LinkedService', 'Drill': 'DrillLinkedService', 'Dynamics': 'DynamicsLinkedService', 'DynamicsAX': 'DynamicsAxLinkedService', 'DynamicsCrm': 'DynamicsCrmLinkedService', 'Eloqua': 'EloquaLinkedService', 'FileServer': 'FileServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'GoogleCloudStorage': 'GoogleCloudStorageLinkedService', 'Greenplum': 'GreenplumLinkedService', 'HBase': 'HBaseLinkedService', 'HDInsight': 'HdInsightLinkedService', 'HDInsightOnDemand': 'HdInsightOnDemandLinkedService', 'Hdfs': 'HdfsLinkedService', 'Hive': 'HiveLinkedService', 'HttpServer': 'HttpLinkedService', 'Hubspot': 'HubspotLinkedService', 'Impala': 'ImpalaLinkedService', 'Informix': 'InformixLinkedService', 'Jira': 'JiraLinkedService', 'Magento': 'MagentoLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Marketo': 'MarketoLinkedService', 'MicrosoftAccess': 'MicrosoftAccessLinkedService', 'MongoDb': 'MongoDBLinkedService', 'MongoDbV2': 'MongoDBV2LinkedService', 'MySql': 'MySQLLinkedService', 'Netezza': 'NetezzaLinkedService', 'OData': 'ODataLinkedService', 'Odbc': 'OdbcLinkedService', 'Office365': 'Office365LinkedService', 'Oracle': 'OracleLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 'Paypal': 'PaypalLinkedService', 'Phoenix': 'PhoenixLinkedService', 'PostgreSql': 'PostgreSQLLinkedService', 'Presto': 'PrestoLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Responsys': 'ResponsysLinkedService', 'RestService': 'RestServiceLinkedService', 'Salesforce': 'SalesforceLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'SalesforceServiceCloud': 'SalesforceServiceCloudLinkedService', 'SapBW': 'SapBwLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapTable': 
'SapTableLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'Sftp': 'SftpServerLinkedService', 'SharePointOnlineList': 'SharePointOnlineListLinkedService', 'Shopify': 'ShopifyLinkedService', 'Snowflake': 'SnowflakeLinkedService', 'Spark': 'SparkLinkedService', 'SqlServer': 'SQLServerLinkedService', 'Square': 'SquareLinkedService', 'Sybase': 'SybaseLinkedService', 'Teradata': 'TeradataLinkedService', 'Vertica': 'VerticaLinkedService', 'Web': 'WebLinkedService', 'Xero': 'XeroLinkedService', 'Zoho': 'ZohoLinkedService'} + 'type': {'AmazonMWS': 'AmazonMwsLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureBlobFS': 'AzureBlobFsLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureDataExplorer': 'AzureDataExplorerLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureDatabricksDeltaLake': 'AzureDatabricksDeltaLakeLinkedService', 'AzureFileStorage': 'AzureFileStorageLinkedService', 'AzureFunction': 'AzureFunctionLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureML': 'AzureMlLinkedService', 'AzureMLService': 'AzureMlServiceLinkedService', 'AzureMariaDB': 'AzureMariaDBLinkedService', 'AzureMySql': 'AzureMySQLLinkedService', 'AzurePostgreSql': 'AzurePostgreSQLLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'AzureSqlDW': 'AzureSQLDWLinkedService', 'AzureSqlDatabase': 'AzureSQLDatabaseLinkedService', 'AzureSqlMI': 'AzureSQLMiLinkedService', 'AzureStorage': 'AzureStorageLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'Cassandra': 'CassandraLinkedService', 'CommonDataServiceForApps': 'CommonDataServiceForAppsLinkedService', 'Concur': 'ConcurLinkedService', 'CosmosDb': 'CosmosDBLinkedService', 'CosmosDbMongoDbApi': 'CosmosDBMongoDBApiLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'Db2': 'Db2LinkedService', 'Drill': 'DrillLinkedService', 'Dynamics': 'DynamicsLinkedService', 'DynamicsAX': 'DynamicsAxLinkedService', 'DynamicsCrm': 'DynamicsCrmLinkedService', 'Eloqua': 'EloquaLinkedService', 'FileServer': 'FileServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'GoogleCloudStorage': 'GoogleCloudStorageLinkedService', 'Greenplum': 'GreenplumLinkedService', 'HBase': 'HBaseLinkedService', 'HDInsight': 'HdInsightLinkedService', 'HDInsightOnDemand': 'HdInsightOnDemandLinkedService', 'Hdfs': 'HdfsLinkedService', 'Hive': 'HiveLinkedService', 'HttpServer': 'HttpLinkedService', 'Hubspot': 'HubspotLinkedService', 'Impala': 'ImpalaLinkedService', 'Informix': 'InformixLinkedService', 'Jira': 'JiraLinkedService', 'Magento': 'MagentoLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Marketo': 'MarketoLinkedService', 'MicrosoftAccess': 'MicrosoftAccessLinkedService', 'MongoDb': 'MongoDBLinkedService', 'MongoDbAtlas': 'MongoDBAtlasLinkedService', 'MongoDbV2': 'MongoDBV2LinkedService', 'MySql': 'MySQLLinkedService', 'Netezza': 'NetezzaLinkedService', 'OData': 'ODataLinkedService', 'Odbc': 'OdbcLinkedService', 'Office365': 'Office365LinkedService', 'Oracle': 'OracleLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 'Paypal': 'PaypalLinkedService', 'Phoenix': 'PhoenixLinkedService', 'PostgreSql': 'PostgreSQLLinkedService', 
'Presto': 'PrestoLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Responsys': 'ResponsysLinkedService', 'RestService': 'RestServiceLinkedService', 'Salesforce': 'SalesforceLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'SalesforceServiceCloud': 'SalesforceServiceCloudLinkedService', 'SapBW': 'SapBwLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapTable': 'SapTableLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'Sftp': 'SftpServerLinkedService', 'SharePointOnlineList': 'SharePointOnlineListLinkedService', 'Shopify': 'ShopifyLinkedService', 'Snowflake': 'SnowflakeLinkedService', 'Spark': 'SparkLinkedService', 'SqlServer': 'SQLServerLinkedService', 'Square': 'SquareLinkedService', 'Sybase': 'SybaseLinkedService', 'Teradata': 'TeradataLinkedService', 'Vertica': 'VerticaLinkedService', 'Web': 'WebLinkedService', 'Xero': 'XeroLinkedService', 'Zoho': 'ZohoLinkedService'} } def __init__( @@ -547,7 +547,7 @@ class Dataset(msrest.serialization.Model): """The Azure Data Factory nested object which identifies data within different data stores, such as tables, files, folders, and documents. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AmazonMwsObjectDataset, AmazonRedshiftTableDataset, AmazonS3Dataset, AvroDataset, AzureBlobDataset, AzureBlobFsDataset, AzureDataExplorerTableDataset, AzureDataLakeStoreDataset, AzureMariaDBTableDataset, AzureMySQLTableDataset, AzurePostgreSQLTableDataset, AzureSearchIndexDataset, AzureSQLDWTableDataset, AzureSQLMiTableDataset, AzureSQLTableDataset, AzureTableDataset, BinaryDataset, CassandraTableDataset, CommonDataServiceForAppsEntityDataset, ConcurObjectDataset, CosmosDBMongoDBApiCollectionDataset, CosmosDBSQLApiCollectionDataset, CouchbaseTableDataset, CustomDataset, Db2TableDataset, DelimitedTextDataset, DocumentDBCollectionDataset, DrillTableDataset, DynamicsAxResourceDataset, DynamicsCrmEntityDataset, DynamicsEntityDataset, EloquaObjectDataset, ExcelDataset, FileShareDataset, GoogleAdWordsObjectDataset, GoogleBigQueryObjectDataset, GreenplumTableDataset, HBaseObjectDataset, HiveObjectDataset, HttpDataset, HubspotObjectDataset, ImpalaObjectDataset, InformixTableDataset, JiraObjectDataset, JsonDataset, MagentoObjectDataset, MariaDBTableDataset, MarketoObjectDataset, MicrosoftAccessTableDataset, MongoDBCollectionDataset, MongoDBV2CollectionDataset, MySQLTableDataset, NetezzaTableDataset, ODataResourceDataset, OdbcTableDataset, Office365Dataset, OracleServiceCloudObjectDataset, OracleTableDataset, OrcDataset, ParquetDataset, PaypalObjectDataset, PhoenixObjectDataset, PostgreSQLTableDataset, PrestoObjectDataset, QuickBooksObjectDataset, RelationalTableDataset, ResponsysObjectDataset, RestResourceDataset, SalesforceMarketingCloudObjectDataset, SalesforceObjectDataset, SalesforceServiceCloudObjectDataset, SapBwCubeDataset, SapCloudForCustomerResourceDataset, SapEccResourceDataset, SapHanaTableDataset, SapOpenHubTableDataset, SapTableResourceDataset, ServiceNowObjectDataset, SharePointOnlineListResourceDataset, ShopifyObjectDataset, SnowflakeDataset, SparkObjectDataset, SQLServerTableDataset, SquareObjectDataset, SybaseTableDataset, TeradataTableDataset, VerticaTableDataset, WebTableDataset, XeroObjectDataset, XmlDataset, ZohoObjectDataset. 
+ sub-classes are: AmazonMwsObjectDataset, AmazonRedshiftTableDataset, AmazonS3Dataset, AvroDataset, AzureBlobDataset, AzureBlobFsDataset, AzureDataExplorerTableDataset, AzureDataLakeStoreDataset, AzureDatabricksDeltaLakeDataset, AzureMariaDBTableDataset, AzureMySQLTableDataset, AzurePostgreSQLTableDataset, AzureSearchIndexDataset, AzureSQLDWTableDataset, AzureSQLMiTableDataset, AzureSQLTableDataset, AzureTableDataset, BinaryDataset, CassandraTableDataset, CommonDataServiceForAppsEntityDataset, ConcurObjectDataset, CosmosDBMongoDBApiCollectionDataset, CosmosDBSQLApiCollectionDataset, CouchbaseTableDataset, CustomDataset, Db2TableDataset, DelimitedTextDataset, DocumentDBCollectionDataset, DrillTableDataset, DynamicsAxResourceDataset, DynamicsCrmEntityDataset, DynamicsEntityDataset, EloquaObjectDataset, ExcelDataset, FileShareDataset, GoogleAdWordsObjectDataset, GoogleBigQueryObjectDataset, GreenplumTableDataset, HBaseObjectDataset, HiveObjectDataset, HttpDataset, HubspotObjectDataset, ImpalaObjectDataset, InformixTableDataset, JiraObjectDataset, JsonDataset, MagentoObjectDataset, MariaDBTableDataset, MarketoObjectDataset, MicrosoftAccessTableDataset, MongoDBAtlasCollectionDataset, MongoDBCollectionDataset, MongoDBV2CollectionDataset, MySQLTableDataset, NetezzaTableDataset, ODataResourceDataset, OdbcTableDataset, Office365Dataset, OracleServiceCloudObjectDataset, OracleTableDataset, OrcDataset, ParquetDataset, PaypalObjectDataset, PhoenixObjectDataset, PostgreSQLTableDataset, PrestoObjectDataset, QuickBooksObjectDataset, RelationalTableDataset, ResponsysObjectDataset, RestResourceDataset, SalesforceMarketingCloudObjectDataset, SalesforceObjectDataset, SalesforceServiceCloudObjectDataset, SapBwCubeDataset, SapCloudForCustomerResourceDataset, SapEccResourceDataset, SapHanaTableDataset, SapOpenHubTableDataset, SapTableResourceDataset, ServiceNowObjectDataset, SharePointOnlineListResourceDataset, ShopifyObjectDataset, SnowflakeDataset, SparkObjectDataset, SQLServerTableDataset, SquareObjectDataset, SybaseTableDataset, TeradataTableDataset, VerticaTableDataset, WebTableDataset, XeroObjectDataset, XmlDataset, ZohoObjectDataset. All required parameters must be populated in order to send to Azure. 
@@ -593,7 +593,7 @@ class Dataset(msrest.serialization.Model): } _subtype_map = { - 'type': {'AmazonMWSObject': 'AmazonMwsObjectDataset', 'AmazonRedshiftTable': 'AmazonRedshiftTableDataset', 'AmazonS3Object': 'AmazonS3Dataset', 'Avro': 'AvroDataset', 'AzureBlob': 'AzureBlobDataset', 'AzureBlobFSFile': 'AzureBlobFsDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'AzureMariaDBTable': 'AzureMariaDBTableDataset', 'AzureMySqlTable': 'AzureMySQLTableDataset', 'AzurePostgreSqlTable': 'AzurePostgreSQLTableDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'AzureSqlDWTable': 'AzureSQLDWTableDataset', 'AzureSqlMITable': 'AzureSQLMiTableDataset', 'AzureSqlTable': 'AzureSQLTableDataset', 'AzureTable': 'AzureTableDataset', 'Binary': 'BinaryDataset', 'CassandraTable': 'CassandraTableDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'ConcurObject': 'ConcurObjectDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDBMongoDBApiCollectionDataset', 'CosmosDbSqlApiCollection': 'CosmosDBSQLApiCollectionDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'CustomDataset': 'CustomDataset', 'Db2Table': 'Db2TableDataset', 'DelimitedText': 'DelimitedTextDataset', 'DocumentDbCollection': 'DocumentDBCollectionDataset', 'DrillTable': 'DrillTableDataset', 'DynamicsAXResource': 'DynamicsAxResourceDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'EloquaObject': 'EloquaObjectDataset', 'Excel': 'ExcelDataset', 'FileShare': 'FileShareDataset', 'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'HBaseObject': 'HBaseObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HttpFile': 'HttpDataset', 'HubspotObject': 'HubspotObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'InformixTable': 'InformixTableDataset', 'JiraObject': 'JiraObjectDataset', 'Json': 'JsonDataset', 'MagentoObject': 'MagentoObjectDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MarketoObject': 'MarketoObjectDataset', 'MicrosoftAccessTable': 'MicrosoftAccessTableDataset', 'MongoDbCollection': 'MongoDBCollectionDataset', 'MongoDbV2Collection': 'MongoDBV2CollectionDataset', 'MySqlTable': 'MySQLTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ODataResource': 'ODataResourceDataset', 'OdbcTable': 'OdbcTableDataset', 'Office365Table': 'Office365Dataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'OracleTable': 'OracleTableDataset', 'Orc': 'OrcDataset', 'Parquet': 'ParquetDataset', 'PaypalObject': 'PaypalObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PostgreSqlTable': 'PostgreSQLTableDataset', 'PrestoObject': 'PrestoObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'RelationalTable': 'RelationalTableDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'RestResource': 'RestResourceDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'SalesforceServiceCloudObject': 'SalesforceServiceCloudObjectDataset', 'SapBwCube': 'SapBwCubeDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'SharePointOnlineListResource': 
'SharePointOnlineListResourceDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'SnowflakeTable': 'SnowflakeDataset', 'SparkObject': 'SparkObjectDataset', 'SqlServerTable': 'SQLServerTableDataset', 'SquareObject': 'SquareObjectDataset', 'SybaseTable': 'SybaseTableDataset', 'TeradataTable': 'TeradataTableDataset', 'VerticaTable': 'VerticaTableDataset', 'WebTable': 'WebTableDataset', 'XeroObject': 'XeroObjectDataset', 'Xml': 'XmlDataset', 'ZohoObject': 'ZohoObjectDataset'} + 'type': {'AmazonMWSObject': 'AmazonMwsObjectDataset', 'AmazonRedshiftTable': 'AmazonRedshiftTableDataset', 'AmazonS3Object': 'AmazonS3Dataset', 'Avro': 'AvroDataset', 'AzureBlob': 'AzureBlobDataset', 'AzureBlobFSFile': 'AzureBlobFsDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'AzureDatabricksDeltaLakeDataset': 'AzureDatabricksDeltaLakeDataset', 'AzureMariaDBTable': 'AzureMariaDBTableDataset', 'AzureMySqlTable': 'AzureMySQLTableDataset', 'AzurePostgreSqlTable': 'AzurePostgreSQLTableDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'AzureSqlDWTable': 'AzureSQLDWTableDataset', 'AzureSqlMITable': 'AzureSQLMiTableDataset', 'AzureSqlTable': 'AzureSQLTableDataset', 'AzureTable': 'AzureTableDataset', 'Binary': 'BinaryDataset', 'CassandraTable': 'CassandraTableDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'ConcurObject': 'ConcurObjectDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDBMongoDBApiCollectionDataset', 'CosmosDbSqlApiCollection': 'CosmosDBSQLApiCollectionDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'CustomDataset': 'CustomDataset', 'Db2Table': 'Db2TableDataset', 'DelimitedText': 'DelimitedTextDataset', 'DocumentDbCollection': 'DocumentDBCollectionDataset', 'DrillTable': 'DrillTableDataset', 'DynamicsAXResource': 'DynamicsAxResourceDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'EloquaObject': 'EloquaObjectDataset', 'Excel': 'ExcelDataset', 'FileShare': 'FileShareDataset', 'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'HBaseObject': 'HBaseObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HttpFile': 'HttpDataset', 'HubspotObject': 'HubspotObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'InformixTable': 'InformixTableDataset', 'JiraObject': 'JiraObjectDataset', 'Json': 'JsonDataset', 'MagentoObject': 'MagentoObjectDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MarketoObject': 'MarketoObjectDataset', 'MicrosoftAccessTable': 'MicrosoftAccessTableDataset', 'MongoDbAtlasCollection': 'MongoDBAtlasCollectionDataset', 'MongoDbCollection': 'MongoDBCollectionDataset', 'MongoDbV2Collection': 'MongoDBV2CollectionDataset', 'MySqlTable': 'MySQLTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ODataResource': 'ODataResourceDataset', 'OdbcTable': 'OdbcTableDataset', 'Office365Table': 'Office365Dataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'OracleTable': 'OracleTableDataset', 'Orc': 'OrcDataset', 'Parquet': 'ParquetDataset', 'PaypalObject': 'PaypalObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PostgreSqlTable': 'PostgreSQLTableDataset', 'PrestoObject': 'PrestoObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'RelationalTable': 'RelationalTableDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'RestResource': 'RestResourceDataset', 'SalesforceMarketingCloudObject': 
'SalesforceMarketingCloudObjectDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'SalesforceServiceCloudObject': 'SalesforceServiceCloudObjectDataset', 'SapBwCube': 'SapBwCubeDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'SharePointOnlineListResource': 'SharePointOnlineListResourceDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'SnowflakeTable': 'SnowflakeDataset', 'SparkObject': 'SparkObjectDataset', 'SqlServerTable': 'SQLServerTableDataset', 'SquareObject': 'SquareObjectDataset', 'SybaseTable': 'SybaseTableDataset', 'TeradataTable': 'TeradataTableDataset', 'VerticaTable': 'VerticaTableDataset', 'WebTable': 'WebTableDataset', 'XeroObject': 'XeroObjectDataset', 'Xml': 'XmlDataset', 'ZohoObject': 'ZohoObjectDataset'} } def __init__( @@ -693,7 +693,7 @@ class CopySource(msrest.serialization.Model): """A copy activity source. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AvroSource, AzureBlobFsSource, AzureDataExplorerSource, AzureDataLakeStoreSource, BinarySource, BlobSource, CommonDataServiceForAppsSource, CosmosDBMongoDBApiSource, CosmosDBSQLApiSource, DelimitedTextSource, DocumentDBCollectionSource, DynamicsCrmSource, DynamicsSource, ExcelSource, FileSystemSource, HdfsSource, HttpSource, JsonSource, MicrosoftAccessSource, MongoDBSource, MongoDBV2Source, ODataSource, Office365Source, OracleSource, OrcSource, ParquetSource, RelationalSource, RestSource, SalesforceServiceCloudSource, SharePointOnlineListSource, SnowflakeSource, TabularSource, WebSource, XmlSource. + sub-classes are: AvroSource, AzureBlobFsSource, AzureDataExplorerSource, AzureDataLakeStoreSource, AzureDatabricksDeltaLakeSource, BinarySource, BlobSource, CommonDataServiceForAppsSource, CosmosDBMongoDBApiSource, CosmosDBSQLApiSource, DelimitedTextSource, DocumentDBCollectionSource, DynamicsCrmSource, DynamicsSource, ExcelSource, FileSystemSource, HdfsSource, HttpSource, JsonSource, MicrosoftAccessSource, MongoDBAtlasSource, MongoDBSource, MongoDBV2Source, ODataSource, Office365Source, OracleSource, OrcSource, ParquetSource, RelationalSource, RestSource, SalesforceServiceCloudSource, SharePointOnlineListSource, SnowflakeSource, TabularSource, WebSource, XmlSource. All required parameters must be populated in order to send to Azure. 
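AzureDatabricksDeltaLakeSource, added near the end of this file, is the piece that consumes the new export settings; a sketch of how the two would be combined, with the query and formats chosen here purely as examples:

from azext_datafactory.vendored_sdks.datafactory.models import (
    AzureDatabricksDeltaLakeExportCommand,
    AzureDatabricksDeltaLakeSource,
)

# The export command controls how dates and timestamps are rendered in the staged csv
# files; the source itself carries only an optional Delta Lake SQL query.
source = AzureDatabricksDeltaLakeSource(
    query="SELECT * FROM sales.daily_orders",  # hypothetical query
    export_settings=AzureDatabricksDeltaLakeExportCommand(
        date_format="yyyy-MM-dd",
        timestamp_format="yyyy-MM-dd HH:mm:ss",
    ),
)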
@@ -726,7 +726,7 @@ class CopySource(msrest.serialization.Model): } _subtype_map = { - 'type': {'AvroSource': 'AvroSource', 'AzureBlobFSSource': 'AzureBlobFsSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'BinarySource': 'BinarySource', 'BlobSource': 'BlobSource', 'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource', 'CosmosDbMongoDbApiSource': 'CosmosDBMongoDBApiSource', 'CosmosDbSqlApiSource': 'CosmosDBSQLApiSource', 'DelimitedTextSource': 'DelimitedTextSource', 'DocumentDbCollectionSource': 'DocumentDBCollectionSource', 'DynamicsCrmSource': 'DynamicsCrmSource', 'DynamicsSource': 'DynamicsSource', 'ExcelSource': 'ExcelSource', 'FileSystemSource': 'FileSystemSource', 'HdfsSource': 'HdfsSource', 'HttpSource': 'HttpSource', 'JsonSource': 'JsonSource', 'MicrosoftAccessSource': 'MicrosoftAccessSource', 'MongoDbSource': 'MongoDBSource', 'MongoDbV2Source': 'MongoDBV2Source', 'ODataSource': 'ODataSource', 'Office365Source': 'Office365Source', 'OracleSource': 'OracleSource', 'OrcSource': 'OrcSource', 'ParquetSource': 'ParquetSource', 'RelationalSource': 'RelationalSource', 'RestSource': 'RestSource', 'SalesforceServiceCloudSource': 'SalesforceServiceCloudSource', 'SharePointOnlineListSource': 'SharePointOnlineListSource', 'SnowflakeSource': 'SnowflakeSource', 'TabularSource': 'TabularSource', 'WebSource': 'WebSource', 'XmlSource': 'XmlSource'} + 'type': {'AvroSource': 'AvroSource', 'AzureBlobFSSource': 'AzureBlobFsSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'AzureDatabricksDeltaLakeSource': 'AzureDatabricksDeltaLakeSource', 'BinarySource': 'BinarySource', 'BlobSource': 'BlobSource', 'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource', 'CosmosDbMongoDbApiSource': 'CosmosDBMongoDBApiSource', 'CosmosDbSqlApiSource': 'CosmosDBSQLApiSource', 'DelimitedTextSource': 'DelimitedTextSource', 'DocumentDbCollectionSource': 'DocumentDBCollectionSource', 'DynamicsCrmSource': 'DynamicsCrmSource', 'DynamicsSource': 'DynamicsSource', 'ExcelSource': 'ExcelSource', 'FileSystemSource': 'FileSystemSource', 'HdfsSource': 'HdfsSource', 'HttpSource': 'HttpSource', 'JsonSource': 'JsonSource', 'MicrosoftAccessSource': 'MicrosoftAccessSource', 'MongoDbAtlasSource': 'MongoDBAtlasSource', 'MongoDbSource': 'MongoDBSource', 'MongoDbV2Source': 'MongoDBV2Source', 'ODataSource': 'ODataSource', 'Office365Source': 'Office365Source', 'OracleSource': 'OracleSource', 'OrcSource': 'OrcSource', 'ParquetSource': 'ParquetSource', 'RelationalSource': 'RelationalSource', 'RestSource': 'RestSource', 'SalesforceServiceCloudSource': 'SalesforceServiceCloudSource', 'SharePointOnlineListSource': 'SharePointOnlineListSource', 'SnowflakeSource': 'SnowflakeSource', 'TabularSource': 'TabularSource', 'WebSource': 'WebSource', 'XmlSource': 'XmlSource'} } def __init__( @@ -1235,6 +1235,9 @@ class AmazonS3LinkedService(LinkedService): :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] + :param authentication_type: The authentication type of S3. Allowed value: AccessKey (default) + or TemporarySecurityCredentials. Type: string (or Expression with resultType string). + :type authentication_type: object :param access_key_id: The access key identifier of the Amazon S3 Identity and Access Management (IAM) user. 
Type: string (or Expression with resultType string). :type access_key_id: object @@ -1245,6 +1248,8 @@ class AmazonS3LinkedService(LinkedService): an optional property; change it only if you want to try a different service endpoint or want to switch between https and http. Type: string (or Expression with resultType string). :type service_url: object + :param session_token: The session token for the S3 temporary security credential. + :type session_token: ~data_factory_management_client.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -1262,9 +1267,11 @@ class AmazonS3LinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, 'access_key_id': {'key': 'typeProperties.accessKeyId', 'type': 'object'}, 'secret_access_key': {'key': 'typeProperties.secretAccessKey', 'type': 'SecretBase'}, 'service_url': {'key': 'typeProperties.serviceUrl', 'type': 'object'}, + 'session_token': {'key': 'typeProperties.sessionToken', 'type': 'SecretBase'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -1276,17 +1283,21 @@ def __init__( description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, + authentication_type: Optional[object] = None, access_key_id: Optional[object] = None, secret_access_key: Optional["SecretBase"] = None, service_url: Optional[object] = None, + session_token: Optional["SecretBase"] = None, encrypted_credential: Optional[object] = None, **kwargs ): super(AmazonS3LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'AmazonS3' # type: str + self.authentication_type = authentication_type self.access_key_id = access_key_id self.secret_access_key = secret_access_key self.service_url = service_url + self.session_token = session_token self.encrypted_credential = encrypted_credential @@ -1766,7 +1777,7 @@ class CopySink(msrest.serialization.Model): """A copy activity sink. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AvroSink, AzureBlobFsSink, AzureDataExplorerSink, AzureDataLakeStoreSink, AzureMySQLSink, AzurePostgreSQLSink, AzureQueueSink, AzureSearchIndexSink, AzureSQLSink, AzureTableSink, BinarySink, BlobSink, CommonDataServiceForAppsSink, CosmosDBMongoDBApiSink, CosmosDBSQLApiSink, DelimitedTextSink, DocumentDBCollectionSink, DynamicsCrmSink, DynamicsSink, FileSystemSink, InformixSink, JsonSink, MicrosoftAccessSink, OdbcSink, OracleSink, OrcSink, ParquetSink, RestSink, SalesforceServiceCloudSink, SalesforceSink, SapCloudForCustomerSink, SnowflakeSink, SQLDWSink, SQLMiSink, SQLServerSink, SQLSink. 
+ sub-classes are: AvroSink, AzureBlobFsSink, AzureDataExplorerSink, AzureDataLakeStoreSink, AzureDatabricksDeltaLakeSink, AzureMySQLSink, AzurePostgreSQLSink, AzureQueueSink, AzureSearchIndexSink, AzureSQLSink, AzureTableSink, BinarySink, BlobSink, CommonDataServiceForAppsSink, CosmosDBMongoDBApiSink, CosmosDBSQLApiSink, DelimitedTextSink, DocumentDBCollectionSink, DynamicsCrmSink, DynamicsSink, FileSystemSink, InformixSink, JsonSink, MicrosoftAccessSink, OdbcSink, OracleSink, OrcSink, ParquetSink, RestSink, SalesforceServiceCloudSink, SalesforceSink, SapCloudForCustomerSink, SnowflakeSink, SQLDWSink, SQLMiSink, SQLServerSink, SQLSink. All required parameters must be populated in order to send to Azure. @@ -1807,7 +1818,7 @@ class CopySink(msrest.serialization.Model): } _subtype_map = { - 'type': {'AvroSink': 'AvroSink', 'AzureBlobFSSink': 'AzureBlobFsSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'AzureMySqlSink': 'AzureMySQLSink', 'AzurePostgreSqlSink': 'AzurePostgreSQLSink', 'AzureQueueSink': 'AzureQueueSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureSqlSink': 'AzureSQLSink', 'AzureTableSink': 'AzureTableSink', 'BinarySink': 'BinarySink', 'BlobSink': 'BlobSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'CosmosDbMongoDbApiSink': 'CosmosDBMongoDBApiSink', 'CosmosDbSqlApiSink': 'CosmosDBSQLApiSink', 'DelimitedTextSink': 'DelimitedTextSink', 'DocumentDbCollectionSink': 'DocumentDBCollectionSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'FileSystemSink': 'FileSystemSink', 'InformixSink': 'InformixSink', 'JsonSink': 'JsonSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'OdbcSink': 'OdbcSink', 'OracleSink': 'OracleSink', 'OrcSink': 'OrcSink', 'ParquetSink': 'ParquetSink', 'RestSink': 'RestSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'SnowflakeSink': 'SnowflakeSink', 'SqlDWSink': 'SQLDWSink', 'SqlMISink': 'SQLMiSink', 'SqlServerSink': 'SQLServerSink', 'SqlSink': 'SQLSink'} + 'type': {'AvroSink': 'AvroSink', 'AzureBlobFSSink': 'AzureBlobFsSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'AzureDatabricksDeltaLakeSink': 'AzureDatabricksDeltaLakeSink', 'AzureMySqlSink': 'AzureMySQLSink', 'AzurePostgreSqlSink': 'AzurePostgreSQLSink', 'AzureQueueSink': 'AzureQueueSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureSqlSink': 'AzureSQLSink', 'AzureTableSink': 'AzureTableSink', 'BinarySink': 'BinarySink', 'BlobSink': 'BlobSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'CosmosDbMongoDbApiSink': 'CosmosDBMongoDBApiSink', 'CosmosDbSqlApiSink': 'CosmosDBSQLApiSink', 'DelimitedTextSink': 'DelimitedTextSink', 'DocumentDbCollectionSink': 'DocumentDBCollectionSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'FileSystemSink': 'FileSystemSink', 'InformixSink': 'InformixSink', 'JsonSink': 'JsonSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'OdbcSink': 'OdbcSink', 'OracleSink': 'OracleSink', 'OrcSink': 'OrcSink', 'ParquetSink': 'ParquetSink', 'RestSink': 'RestSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'SnowflakeSink': 'SnowflakeSink', 'SqlDWSink': 'SQLDWSink', 'SqlMISink': 'SQLMiSink', 'SqlServerSink': 'SQLServerSink', 'SqlSink': 
'SQLSink'} } def __init__( @@ -1958,7 +1969,7 @@ class FormatWriteSettings(msrest.serialization.Model): """Format write settings. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AvroWriteSettings, DelimitedTextWriteSettings, JsonWriteSettings. + sub-classes are: AvroWriteSettings, DelimitedTextWriteSettings, JsonWriteSettings, OrcWriteSettings, ParquetWriteSettings. All required parameters must be populated in order to send to Azure. @@ -1979,7 +1990,7 @@ class FormatWriteSettings(msrest.serialization.Model): } _subtype_map = { - 'type': {'AvroWriteSettings': 'AvroWriteSettings', 'DelimitedTextWriteSettings': 'DelimitedTextWriteSettings', 'JsonWriteSettings': 'JsonWriteSettings'} + 'type': {'AvroWriteSettings': 'AvroWriteSettings', 'DelimitedTextWriteSettings': 'DelimitedTextWriteSettings', 'JsonWriteSettings': 'JsonWriteSettings', 'OrcWriteSettings': 'OrcWriteSettings', 'ParquetWriteSettings': 'ParquetWriteSettings'} } def __init__( @@ -2007,6 +2018,13 @@ class AvroWriteSettings(FormatWriteSettings): :type record_name: str :param record_namespace: Record namespace in the write result. :type record_namespace: str + :param max_rows_per_file: Limit the written file's row count to be smaller than or equal to the + specified count. Type: integer (or Expression with resultType integer). + :type max_rows_per_file: object + :param file_name_prefix: Specifies the file name pattern + :code:``_:code:``.:code:`` when copy from non-file + based store without partitionOptions. Type: string (or Expression with resultType string). + :type file_name_prefix: object """ _validation = { @@ -2018,6 +2036,8 @@ class AvroWriteSettings(FormatWriteSettings): 'type': {'key': 'type', 'type': 'str'}, 'record_name': {'key': 'recordName', 'type': 'str'}, 'record_namespace': {'key': 'recordNamespace', 'type': 'str'}, + 'max_rows_per_file': {'key': 'maxRowsPerFile', 'type': 'object'}, + 'file_name_prefix': {'key': 'fileNamePrefix', 'type': 'object'}, } def __init__( @@ -2026,12 +2046,16 @@ def __init__( additional_properties: Optional[Dict[str, object]] = None, record_name: Optional[str] = None, record_namespace: Optional[str] = None, + max_rows_per_file: Optional[object] = None, + file_name_prefix: Optional[object] = None, **kwargs ): super(AvroWriteSettings, self).__init__(additional_properties=additional_properties, **kwargs) self.type = 'AvroWriteSettings' # type: str self.record_name = record_name self.record_namespace = record_namespace + self.max_rows_per_file = max_rows_per_file + self.file_name_prefix = file_name_prefix class CustomSetupBase(msrest.serialization.Model): @@ -3119,6 +3143,444 @@ def __init__( self.block_size_in_mb = block_size_in_mb +class AzureDatabricksDeltaLakeDataset(Dataset): + """Azure Databricks Delta Lake dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. 
+ :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~data_factory_management_client.models.DatasetFolder + :param table: The name of delta table. Type: string (or Expression with resultType string). + :type table: object + :param database: The database name of delta table. Type: string (or Expression with resultType + string). + :type database: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, + } + + def __init__( + self, + *, + linked_service_name: "LinkedServiceReference", + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + folder: Optional["DatasetFolder"] = None, + table: Optional[object] = None, + database: Optional[object] = None, + **kwargs + ): + super(AzureDatabricksDeltaLakeDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'AzureDatabricksDeltaLakeDataset' # type: str + self.table = table + self.database = database + + +class ExportSettings(msrest.serialization.Model): + """Export command settings. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AzureDatabricksDeltaLakeExportCommand, SnowflakeExportCopyCommand. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The export setting type.Constant filled by server. 
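A minimal construction of the Delta Lake dataset above; the LinkedServiceReference signature is assumed from the generated models, and the table and database names are placeholders:

from azext_datafactory.vendored_sdks.datafactory.models import (
    AzureDatabricksDeltaLakeDataset,
    LinkedServiceReference,
)

# linked_service_name is the only required argument; table and database serialize to
# typeProperties.table and typeProperties.database.
dataset = AzureDatabricksDeltaLakeDataset(
    linked_service_name=LinkedServiceReference(
        type="LinkedServiceReference",             # harmless if the generator made this a constant
        reference_name="exampleDeltaLakeService",  # hypothetical linked service name
    ),
    database="sales",
    table="daily_orders",
)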
+ :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'AzureDatabricksDeltaLakeExportCommand': 'AzureDatabricksDeltaLakeExportCommand', 'SnowflakeExportCopyCommand': 'SnowflakeExportCopyCommand'} + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + **kwargs + ): + super(ExportSettings, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = 'ExportSettings' # type: str + + +class AzureDatabricksDeltaLakeExportCommand(ExportSettings): + """Azure Databricks Delta Lake export command settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The export setting type.Constant filled by server. + :type type: str + :param date_format: Specify the date format for the csv in Azure Databricks Delta Lake Copy. + Type: string (or Expression with resultType string). + :type date_format: object + :param timestamp_format: Specify the timestamp format for the csv in Azure Databricks Delta + Lake Copy. Type: string (or Expression with resultType string). + :type timestamp_format: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'date_format': {'key': 'dateFormat', 'type': 'object'}, + 'timestamp_format': {'key': 'timestampFormat', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + date_format: Optional[object] = None, + timestamp_format: Optional[object] = None, + **kwargs + ): + super(AzureDatabricksDeltaLakeExportCommand, self).__init__(additional_properties=additional_properties, **kwargs) + self.type = 'AzureDatabricksDeltaLakeExportCommand' # type: str + self.date_format = date_format + self.timestamp_format = timestamp_format + + +class ImportSettings(msrest.serialization.Model): + """Import command settings. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AzureDatabricksDeltaLakeImportCommand, SnowflakeImportCopyCommand. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The import setting type.Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'AzureDatabricksDeltaLakeImportCommand': 'AzureDatabricksDeltaLakeImportCommand', 'SnowflakeImportCopyCommand': 'SnowflakeImportCopyCommand'} + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + **kwargs + ): + super(ImportSettings, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = 'ImportSettings' # type: str + + +class AzureDatabricksDeltaLakeImportCommand(ImportSettings): + """Azure Databricks Delta Lake import command settings. 
+ + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The import setting type.Constant filled by server. + :type type: str + :param date_format: Specify the date format for csv in Azure Databricks Delta Lake Copy. Type: + string (or Expression with resultType string). + :type date_format: object + :param timestamp_format: Specify the timestamp format for csv in Azure Databricks Delta Lake + Copy. Type: string (or Expression with resultType string). + :type timestamp_format: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'date_format': {'key': 'dateFormat', 'type': 'object'}, + 'timestamp_format': {'key': 'timestampFormat', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + date_format: Optional[object] = None, + timestamp_format: Optional[object] = None, + **kwargs + ): + super(AzureDatabricksDeltaLakeImportCommand, self).__init__(additional_properties=additional_properties, **kwargs) + self.type = 'AzureDatabricksDeltaLakeImportCommand' # type: str + self.date_format = date_format + self.timestamp_format = timestamp_format + + +class AzureDatabricksDeltaLakeLinkedService(LinkedService): + """Azure Databricks Delta Lake linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param domain: Required. :code:``.azuredatabricks.net, domain name of your Databricks + deployment. Type: string (or Expression with resultType string). + :type domain: object + :param access_token: Access token for databricks REST API. Refer to + https://docs.azuredatabricks.net/api/latest/authentication.html. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type access_token: ~data_factory_management_client.models.SecretBase + :param cluster_id: The id of an existing interactive cluster that will be used for all runs of + this job. Type: string (or Expression with resultType string). + :type cluster_id: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'domain': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'domain': {'key': 'typeProperties.domain', 'type': 'object'}, + 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, + 'cluster_id': {'key': 'typeProperties.clusterId', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + *, + domain: object, + additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, + description: Optional[str] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + access_token: Optional["SecretBase"] = None, + cluster_id: Optional[object] = None, + encrypted_credential: Optional[object] = None, + **kwargs + ): + super(AzureDatabricksDeltaLakeLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'AzureDatabricksDeltaLake' # type: str + self.domain = domain + self.access_token = access_token + self.cluster_id = cluster_id + self.encrypted_credential = encrypted_credential + + +class AzureDatabricksDeltaLakeSink(CopySink): + """A copy activity Azure Databricks Delta Lake sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType + string). + :type pre_copy_script: object + :param import_settings: Azure Databricks Delta Lake import settings. 
+ :type import_settings: + ~data_factory_management_client.models.AzureDatabricksDeltaLakeImportCommand + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + 'import_settings': {'key': 'importSettings', 'type': 'AzureDatabricksDeltaLakeImportCommand'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + write_batch_size: Optional[object] = None, + write_batch_timeout: Optional[object] = None, + sink_retry_count: Optional[object] = None, + sink_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + pre_copy_script: Optional[object] = None, + import_settings: Optional["AzureDatabricksDeltaLakeImportCommand"] = None, + **kwargs + ): + super(AzureDatabricksDeltaLakeSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'AzureDatabricksDeltaLakeSink' # type: str + self.pre_copy_script = pre_copy_script + self.import_settings = import_settings + + +class AzureDatabricksDeltaLakeSource(CopySource): + """A copy activity Azure Databricks Delta Lake source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query: Azure Databricks Delta Lake Sql query. Type: string (or Expression with + resultType string). + :type query: object + :param export_settings: Azure Databricks Delta Lake export settings. 
+ :type export_settings: + ~data_factory_management_client.models.AzureDatabricksDeltaLakeExportCommand + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + 'export_settings': {'key': 'exportSettings', 'type': 'AzureDatabricksDeltaLakeExportCommand'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query: Optional[object] = None, + export_settings: Optional["AzureDatabricksDeltaLakeExportCommand"] = None, + **kwargs + ): + super(AzureDatabricksDeltaLakeSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'AzureDatabricksDeltaLakeSource' # type: str + self.query = query + self.export_settings = export_settings + + class AzureDatabricksLinkedService(LinkedService): """Azure Databricks linked service. @@ -8828,7 +9290,7 @@ class CompressionReadSettings(msrest.serialization.Model): """Compression read settings. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: ZipDeflateReadSettings. + sub-classes are: TarGZipReadSettings, TarReadSettings, ZipDeflateReadSettings. All required parameters must be populated in order to send to Azure. @@ -8849,7 +9311,7 @@ class CompressionReadSettings(msrest.serialization.Model): } _subtype_map = { - 'type': {'ZipDeflateReadSettings': 'ZipDeflateReadSettings'} + 'type': {'TarGZipReadSettings': 'TarGZipReadSettings', 'TarReadSettings': 'TarReadSettings', 'ZipDeflateReadSettings': 'ZipDeflateReadSettings'} } def __init__( @@ -8881,6 +9343,9 @@ class ConcurLinkedService(LinkedService): :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] + :param connection_properties: Properties used to connect to Concur. It is mutually exclusive + with any other properties in the linked service. Type: object. + :type connection_properties: object :param client_id: Required. Application client_id supplied by Concur App Management. :type client_id: object :param username: Required. The user name that you use to access Concur Service. 
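Editor's note: the four Azure Databricks Delta Lake classes added above (the export/import command settings plus the copy source and sink) are meant to be wired together inside a copy activity. A minimal sketch follows; the import path, the SecureString secret type, and all connection values are assumptions for illustration, not part of this patch.

    from azext_datafactory.vendored_sdks.datafactory import models  # import path assumed

    # Linked service pointing at the Databricks workspace (domain is the only required field).
    delta_lake_ls = models.AzureDatabricksDeltaLakeLinkedService(
        domain="adb-1234567890123456.7.azuredatabricks.net",        # placeholder workspace domain
        access_token=models.SecureString(value="<databricks-pat>"),  # SecureString assumed available as a SecretBase
        cluster_id="0612-233905-abcde123",                           # placeholder interactive cluster id
    )

    # Source side: run a Delta Lake SQL query and stage the result as csv via export settings.
    source = models.AzureDatabricksDeltaLakeSource(
        query="SELECT * FROM events",
        export_settings=models.AzureDatabricksDeltaLakeExportCommand(
            date_format="yyyy-MM-dd",
            timestamp_format="yyyy-MM-dd HH:mm:ss",
        ),
    )

    # Sink side: load staged csv into Delta Lake, optionally running a pre-copy script first.
    sink = models.AzureDatabricksDeltaLakeSink(
        pre_copy_script="TRUNCATE TABLE events_copy",
        import_settings=models.AzureDatabricksDeltaLakeImportCommand(
            date_format="yyyy-MM-dd",
            timestamp_format="yyyy-MM-dd HH:mm:ss",
        ),
    )

The source and sink objects would then be placed on a CopyActivity, which is unchanged by this hunk.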
@@ -8917,6 +9382,7 @@ class ConcurLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'connection_properties': {'key': 'typeProperties.connectionProperties', 'type': 'object'}, 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, 'username': {'key': 'typeProperties.username', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, @@ -8936,6 +9402,7 @@ def __init__( description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, + connection_properties: Optional[object] = None, password: Optional["SecretBase"] = None, use_encrypted_endpoints: Optional[object] = None, use_host_verification: Optional[object] = None, @@ -8945,6 +9412,7 @@ def __init__( ): super(ConcurLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'Concur' # type: str + self.connection_properties = connection_properties self.client_id = client_id self.username = username self.password = password @@ -9218,9 +9686,11 @@ class CopyActivity(ExecutionActivity): EnableSkipIncompatibleRow is true. :type redirect_incompatible_row_settings: ~data_factory_management_client.models.RedirectIncompatibleRowSettings - :param log_storage_settings: Log storage settings customer need to provide when enabling - session log. + :param log_storage_settings: (Deprecated. Please use LogSettings) Log storage settings customer + need to provide when enabling session log. :type log_storage_settings: ~data_factory_management_client.models.LogStorageSettings + :param log_settings: Log settings customer needs provide when enabling log. + :type log_settings: ~data_factory_management_client.models.LogSettings :param preserve_rules: Preserve Rules. :type preserve_rules: list[object] :param preserve: Preserve rules. 
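Editor's note: the hunk above adds connection_properties to ConcurLinkedService, documented as mutually exclusive with the individual connection fields. A hedged sketch of both styles; the import path, SecureString, and the shape of the connection_properties object are assumptions (the swagger only types it as object).

    from azext_datafactory.vendored_sdks.datafactory import models  # import path assumed

    # Classic style: individual typeProperties fields.
    concur_ls = models.ConcurLinkedService(
        client_id="<concur-app-client-id>",
        username="integration-user",
        password=models.SecureString(value="<password>"),  # SecureString assumed available
    )

    # New style: a single connectionProperties bag instead of the fields above.
    concur_ls_props = models.ConcurLinkedService(
        client_id="<concur-app-client-id>",   # still required by the model's validation
        username="integration-user",
        connection_properties={               # key names are illustrative only
            "clientId": "<concur-app-client-id>",
            "username": "integration-user",
        },
    )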
@@ -9260,6 +9730,7 @@ class CopyActivity(ExecutionActivity): 'enable_skip_incompatible_row': {'key': 'typeProperties.enableSkipIncompatibleRow', 'type': 'object'}, 'redirect_incompatible_row_settings': {'key': 'typeProperties.redirectIncompatibleRowSettings', 'type': 'RedirectIncompatibleRowSettings'}, 'log_storage_settings': {'key': 'typeProperties.logStorageSettings', 'type': 'LogStorageSettings'}, + 'log_settings': {'key': 'typeProperties.logSettings', 'type': 'LogSettings'}, 'preserve_rules': {'key': 'typeProperties.preserveRules', 'type': '[object]'}, 'preserve': {'key': 'typeProperties.preserve', 'type': '[object]'}, 'validate_data_consistency': {'key': 'typeProperties.validateDataConsistency', 'type': 'object'}, @@ -9288,6 +9759,7 @@ def __init__( enable_skip_incompatible_row: Optional[object] = None, redirect_incompatible_row_settings: Optional["RedirectIncompatibleRowSettings"] = None, log_storage_settings: Optional["LogStorageSettings"] = None, + log_settings: Optional["LogSettings"] = None, preserve_rules: Optional[List[object]] = None, preserve: Optional[List[object]] = None, validate_data_consistency: Optional[object] = None, @@ -9308,12 +9780,41 @@ def __init__( self.enable_skip_incompatible_row = enable_skip_incompatible_row self.redirect_incompatible_row_settings = redirect_incompatible_row_settings self.log_storage_settings = log_storage_settings + self.log_settings = log_settings self.preserve_rules = preserve_rules self.preserve = preserve self.validate_data_consistency = validate_data_consistency self.skip_error_file = skip_error_file +class CopyActivityLogSettings(msrest.serialization.Model): + """Settings for copy activity log. + + :param log_level: Gets or sets the log level, support: Info, Warning. Type: string (or + Expression with resultType string). + :type log_level: object + :param enable_reliable_logging: Specifies whether to enable reliable logging. Type: boolean (or + Expression with resultType boolean). + :type enable_reliable_logging: object + """ + + _attribute_map = { + 'log_level': {'key': 'logLevel', 'type': 'object'}, + 'enable_reliable_logging': {'key': 'enableReliableLogging', 'type': 'object'}, + } + + def __init__( + self, + *, + log_level: Optional[object] = None, + enable_reliable_logging: Optional[object] = None, + **kwargs + ): + super(CopyActivityLogSettings, self).__init__(**kwargs) + self.log_level = log_level + self.enable_reliable_logging = enable_reliable_logging + + class CopyTranslator(msrest.serialization.Model): """A copy activity translator. @@ -11536,7 +12037,7 @@ class DatasetCompression(msrest.serialization.Model): """The compression method used on a dataset. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: DatasetBZip2Compression, DatasetDeflateCompression, DatasetGZipCompression, DatasetZipDeflateCompression. + sub-classes are: DatasetBZip2Compression, DatasetDeflateCompression, DatasetGZipCompression, DatasetTarCompression, DatasetTarGZipCompression, DatasetZipDeflateCompression. All required parameters must be populated in order to send to Azure. 
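Editor's note: the CopyActivity hunk above deprecates log_storage_settings in favour of a new log_settings property, and CopyActivityLogSettings carries the per-activity knobs. LogSettings and LogLocationSettings, which wrap those knobs together with the storage location, are added further down in this patch. A minimal sketch, assuming the vendored models import path and a placeholder linked-service reference:

    from azext_datafactory.vendored_sdks.datafactory import models  # import path assumed

    log_settings = models.LogSettings(
        enable_copy_activity_log=True,
        copy_activity_log_settings=models.CopyActivityLogSettings(
            log_level="Warning",           # docstring: Info or Warning
            enable_reliable_logging=True,
        ),
        log_location_settings=models.LogLocationSettings(
            # LinkedServiceReference construction is assumed; only reference_name is shown here.
            linked_service_name=models.LinkedServiceReference(reference_name="AzureBlobStorageLS"),
            path="copy-activity-logs",
        ),
    )
    # Pass as CopyActivity(..., log_settings=log_settings) instead of the
    # deprecated log_storage_settings parameter.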
@@ -11557,7 +12058,7 @@ class DatasetCompression(msrest.serialization.Model): } _subtype_map = { - 'type': {'BZip2': 'DatasetBZip2Compression', 'Deflate': 'DatasetDeflateCompression', 'GZip': 'DatasetGZipCompression', 'ZipDeflate': 'DatasetZipDeflateCompression'} + 'type': {'BZip2': 'DatasetBZip2Compression', 'Deflate': 'DatasetDeflateCompression', 'GZip': 'DatasetGZipCompression', 'Tar': 'DatasetTarCompression', 'TarGZip': 'DatasetTarGZipCompression', 'ZipDeflate': 'DatasetZipDeflateCompression'} } def __init__( @@ -11901,6 +12402,73 @@ def __init__( self.type = type +class DatasetTarCompression(DatasetCompression): + """The Tar archive method used on a dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset compression.Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + **kwargs + ): + super(DatasetTarCompression, self).__init__(additional_properties=additional_properties, **kwargs) + self.type = 'Tar' # type: str + + +class DatasetTarGZipCompression(DatasetCompression): + """The TarGZip compression method used on a dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset compression.Constant filled by server. + :type type: str + :param level: The TarGZip compression level. Possible values include: "Optimal", "Fastest". + :type level: str or ~data_factory_management_client.models.DatasetCompressionLevel + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'str'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + level: Optional[Union[str, "DatasetCompressionLevel"]] = None, + **kwargs + ): + super(DatasetTarGZipCompression, self).__init__(additional_properties=additional_properties, **kwargs) + self.type = 'TarGZip' # type: str + self.level = level + + class DatasetZipDeflateCompression(DatasetCompression): """The ZipDeflate compression method used on a dataset. @@ -12335,7 +12903,7 @@ class DelimitedTextDataset(Dataset): resultType string). :type encoding_name: object :param compression_codec: Possible values include: "none", "gzip", "snappy", "lzo", "bzip2", - "deflate", "zipDeflate", "lz4". + "deflate", "zipDeflate", "lz4", "tar", "tarGZip". :type compression_codec: str or ~data_factory_management_client.models.CompressionCodec :param compression_level: The data compression method used for DelimitedText. Possible values include: "Optimal", "Fastest". @@ -12602,6 +13170,13 @@ class DelimitedTextWriteSettings(FormatWriteSettings): :param file_extension: Required. The file extension used to create the files. Type: string (or Expression with resultType string). 
:type file_extension: object + :param max_rows_per_file: Limit the written file's row count to be smaller than or equal to the + specified count. Type: integer (or Expression with resultType integer). + :type max_rows_per_file: object + :param file_name_prefix: Specifies the file name pattern + :code:``_:code:``.:code:`` when copy from non-file + based store without partitionOptions. Type: string (or Expression with resultType string). + :type file_name_prefix: object """ _validation = { @@ -12614,6 +13189,8 @@ class DelimitedTextWriteSettings(FormatWriteSettings): 'type': {'key': 'type', 'type': 'str'}, 'quote_all_text': {'key': 'quoteAllText', 'type': 'object'}, 'file_extension': {'key': 'fileExtension', 'type': 'object'}, + 'max_rows_per_file': {'key': 'maxRowsPerFile', 'type': 'object'}, + 'file_name_prefix': {'key': 'fileNamePrefix', 'type': 'object'}, } def __init__( @@ -12622,12 +13199,16 @@ def __init__( file_extension: object, additional_properties: Optional[Dict[str, object]] = None, quote_all_text: Optional[object] = None, + max_rows_per_file: Optional[object] = None, + file_name_prefix: Optional[object] = None, **kwargs ): super(DelimitedTextWriteSettings, self).__init__(additional_properties=additional_properties, **kwargs) self.type = 'DelimitedTextWriteSettings' # type: str self.quote_all_text = quote_all_text self.file_extension = file_extension + self.max_rows_per_file = max_rows_per_file + self.file_name_prefix = file_name_prefix class DependencyReference(msrest.serialization.Model): @@ -14604,23 +15185,24 @@ class ExecuteDataFlowActivityTypePropertiesCompute(msrest.serialization.Model): """Compute properties for data flow activity. :param compute_type: Compute type of the cluster which will execute data flow job. Possible - values include: "General", "MemoryOptimized", "ComputeOptimized". - :type compute_type: str or ~data_factory_management_client.models.DataFlowComputeType + values include: 'General', 'MemoryOptimized', 'ComputeOptimized'. Type: string (or Expression + with resultType string). + :type compute_type: object :param core_count: Core count of the cluster which will execute data flow job. Supported values - are: 8, 16, 32, 48, 80, 144 and 272. - :type core_count: int + are: 8, 16, 32, 48, 80, 144 and 272. Type: integer (or Expression with resultType integer). + :type core_count: object """ _attribute_map = { - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'core_count': {'key': 'coreCount', 'type': 'int'}, + 'compute_type': {'key': 'computeType', 'type': 'object'}, + 'core_count': {'key': 'coreCount', 'type': 'object'}, } def __init__( self, *, - compute_type: Optional[Union[str, "DataFlowComputeType"]] = None, - core_count: Optional[int] = None, + compute_type: Optional[object] = None, + core_count: Optional[object] = None, **kwargs ): super(ExecuteDataFlowActivityTypePropertiesCompute, self).__init__(**kwargs) @@ -14819,45 +15401,6 @@ def __init__( self.log_location = log_location -class ExportSettings(msrest.serialization.Model): - """Export command settings. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: SnowflakeExportCopyCommand. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. The export setting type.Constant filled by server. 
- :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'SnowflakeExportCopyCommand': 'SnowflakeExportCopyCommand'} - } - - def __init__( - self, - *, - additional_properties: Optional[Dict[str, object]] = None, - **kwargs - ): - super(ExportSettings, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.type = 'ExportSettings' # type: str - - class ExposureControlBatchRequest(msrest.serialization.Model): """A list of exposure control features. @@ -15089,6 +15632,9 @@ class Factory(Resource): :param global_parameters: List of parameters for factory. :type global_parameters: dict[str, ~data_factory_management_client.models.GlobalParameterSpecification] + :param public_network_access: Whether or not public network access is allowed for the data + factory. Possible values include: "Enabled", "Disabled". + :type public_network_access: str or ~data_factory_management_client.models.PublicNetworkAccess """ _validation = { @@ -15115,6 +15661,7 @@ class Factory(Resource): 'version': {'key': 'properties.version', 'type': 'str'}, 'repo_configuration': {'key': 'properties.repoConfiguration', 'type': 'FactoryRepoConfiguration'}, 'global_parameters': {'key': 'properties.globalParameters', 'type': '{GlobalParameterSpecification}'}, + 'public_network_access': {'key': 'properties.publicNetworkAccess', 'type': 'str'}, } def __init__( @@ -15126,6 +15673,7 @@ def __init__( identity: Optional["FactoryIdentity"] = None, repo_configuration: Optional["FactoryRepoConfiguration"] = None, global_parameters: Optional[Dict[str, "GlobalParameterSpecification"]] = None, + public_network_access: Optional[Union[str, "PublicNetworkAccess"]] = None, **kwargs ): super(Factory, self).__init__(location=location, tags=tags, **kwargs) @@ -15136,6 +15684,7 @@ def __init__( self.version = None self.repo_configuration = repo_configuration self.global_parameters = global_parameters + self.public_network_access = public_network_access class FactoryRepoConfiguration(msrest.serialization.Model): @@ -20030,45 +20579,6 @@ def __init__( self.query = query -class ImportSettings(msrest.serialization.Model): - """Import command settings. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: SnowflakeImportCopyCommand. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. The import setting type.Constant filled by server. - :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'SnowflakeImportCopyCommand': 'SnowflakeImportCopyCommand'} - } - - def __init__( - self, - *, - additional_properties: Optional[Dict[str, object]] = None, - **kwargs - ): - super(ImportSettings, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.type = 'ImportSettings' # type: str - - class InformixLinkedService(LinkedService): """Informix linked service. 
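Editor's note: the Factory hunk above surfaces the new publicNetworkAccess property on the Factory resource model. A hedged sketch of constructing a factory with public network access disabled; the import path, operation call at the end, and resource names are assumptions for illustration.

    from azext_datafactory.vendored_sdks.datafactory import models  # import path assumed

    factory = models.Factory(
        location="eastus",                   # placeholder region
        public_network_access="Disabled",    # enum values per the docstring: "Enabled" or "Disabled"
    )
    # The object would then be sent through the factory create/update operation,
    # e.g. client.create_or_update(resource_group_name, factory_name, factory)
    # (the operation name is assumed; it is not part of this hunk).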
@@ -22113,8 +22623,81 @@ def __init__( self.properties = properties +class LogLocationSettings(msrest.serialization.Model): + """Log location settings. + + All required parameters must be populated in order to send to Azure. + + :param linked_service_name: Required. Log storage linked service reference. + :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :param path: The path to storage for storing detailed logs of activity execution. Type: string + (or Expression with resultType string). + :type path: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'path': {'key': 'path', 'type': 'object'}, + } + + def __init__( + self, + *, + linked_service_name: "LinkedServiceReference", + path: Optional[object] = None, + **kwargs + ): + super(LogLocationSettings, self).__init__(**kwargs) + self.linked_service_name = linked_service_name + self.path = path + + +class LogSettings(msrest.serialization.Model): + """Log settings. + + All required parameters must be populated in order to send to Azure. + + :param enable_copy_activity_log: Specifies whether to enable copy activity log. Type: boolean + (or Expression with resultType boolean). + :type enable_copy_activity_log: object + :param copy_activity_log_settings: Specifies settings for copy activity log. + :type copy_activity_log_settings: + ~data_factory_management_client.models.CopyActivityLogSettings + :param log_location_settings: Required. Log location settings customer needs to provide when + enabling log. + :type log_location_settings: ~data_factory_management_client.models.LogLocationSettings + """ + + _validation = { + 'log_location_settings': {'required': True}, + } + + _attribute_map = { + 'enable_copy_activity_log': {'key': 'enableCopyActivityLog', 'type': 'object'}, + 'copy_activity_log_settings': {'key': 'copyActivityLogSettings', 'type': 'CopyActivityLogSettings'}, + 'log_location_settings': {'key': 'logLocationSettings', 'type': 'LogLocationSettings'}, + } + + def __init__( + self, + *, + log_location_settings: "LogLocationSettings", + enable_copy_activity_log: Optional[object] = None, + copy_activity_log_settings: Optional["CopyActivityLogSettings"] = None, + **kwargs + ): + super(LogSettings, self).__init__(**kwargs) + self.enable_copy_activity_log = enable_copy_activity_log + self.copy_activity_log_settings = copy_activity_log_settings + self.log_location_settings = log_location_settings + + class LogStorageSettings(msrest.serialization.Model): - """Log storage settings. + """(Deprecated. Please use LogSettings) Log storage settings. All required parameters must be populated in order to send to Azure. @@ -23736,6 +24319,216 @@ def __init__( self.table_name = table_name +class MongoDBAtlasCollectionDataset(Dataset): + """The MongoDB Atlas database dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. 
+ :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~data_factory_management_client.models.DatasetFolder + :param collection: Required. The collection name of the MongoDB Atlas database. Type: string + (or Expression with resultType string). + :type collection: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + 'collection': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'collection': {'key': 'typeProperties.collection', 'type': 'object'}, + } + + def __init__( + self, + *, + linked_service_name: "LinkedServiceReference", + collection: object, + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + folder: Optional["DatasetFolder"] = None, + **kwargs + ): + super(MongoDBAtlasCollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'MongoDbAtlasCollection' # type: str + self.collection = collection + + +class MongoDBAtlasLinkedService(LinkedService): + """Linked service for MongoDB Atlas data source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param connection_string: Required. The MongoDB Atlas connection string. 
Type: string, + SecureString or AzureKeyVaultSecretReference. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type connection_string: object + :param database: Required. The name of the MongoDB Atlas database that you want to access. + Type: string (or Expression with resultType string). + :type database: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + 'database': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, + } + + def __init__( + self, + *, + connection_string: object, + database: object, + additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, + description: Optional[str] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + **kwargs + ): + super(MongoDBAtlasLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'MongoDbAtlas' # type: str + self.connection_string = connection_string + self.database = database + + +class MongoDBAtlasSource(CopySource): + """A copy activity source for a MongoDB Atlas database. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param filter: Specifies selection filter using query operators. To return all documents in a + collection, omit this parameter or pass an empty document ({}). Type: string (or Expression + with resultType string). + :type filter: object + :param cursor_methods: Cursor methods for Mongodb query. + :type cursor_methods: ~data_factory_management_client.models.MongoDBCursorMethodsProperties + :param batch_size: Specifies the number of documents to return in each batch of the response + from MongoDB Atlas instance. In most cases, modifying the batch size will not affect the user + or the application. This property's main purpose is to avoid hit the limitation of response + size. Type: integer (or Expression with resultType integer). + :type batch_size: object + :param query_timeout: Query timeout. 
Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'filter': {'key': 'filter', 'type': 'object'}, + 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDBCursorMethodsProperties'}, + 'batch_size': {'key': 'batchSize', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + filter: Optional[object] = None, + cursor_methods: Optional["MongoDBCursorMethodsProperties"] = None, + batch_size: Optional[object] = None, + query_timeout: Optional[object] = None, + additional_columns: Optional[List["AdditionalColumns"]] = None, + **kwargs + ): + super(MongoDBAtlasSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'MongoDbAtlasSource' # type: str + self.filter = filter + self.cursor_methods = cursor_methods + self.batch_size = batch_size + self.query_timeout = query_timeout + self.additional_columns = additional_columns + + class MongoDBCollectionDataset(Dataset): """The MongoDB database dataset. @@ -26322,7 +27115,7 @@ class OrcDataset(Dataset): :type folder: ~data_factory_management_client.models.DatasetFolder :param location: The location of the ORC data storage. :type location: ~data_factory_management_client.models.DatasetLocation - :param orc_compression_codec: Possible values include: "none", "zlib", "snappy". + :param orc_compression_codec: Possible values include: "none", "zlib", "snappy", "lzo". :type orc_compression_codec: str or ~data_factory_management_client.models.OrcCompressionCodec """ @@ -26432,6 +27225,8 @@ class OrcSink(CopySink): :type max_concurrent_connections: object :param store_settings: ORC store settings. :type store_settings: ~data_factory_management_client.models.StoreWriteSettings + :param format_settings: ORC format settings. 
+ :type format_settings: ~data_factory_management_client.models.OrcWriteSettings """ _validation = { @@ -26447,6 +27242,7 @@ class OrcSink(CopySink): 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, + 'format_settings': {'key': 'formatSettings', 'type': 'OrcWriteSettings'}, } def __init__( @@ -26459,11 +27255,13 @@ def __init__( sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, store_settings: Optional["StoreWriteSettings"] = None, + format_settings: Optional["OrcWriteSettings"] = None, **kwargs ): super(OrcSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.type = 'OrcSink' # type: str self.store_settings = store_settings + self.format_settings = format_settings class OrcSource(CopySource): @@ -26523,6 +27321,50 @@ def __init__( self.additional_columns = additional_columns +class OrcWriteSettings(FormatWriteSettings): + """Orc write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The write setting type.Constant filled by server. + :type type: str + :param max_rows_per_file: Limit the written file's row count to be smaller than or equal to the + specified count. Type: integer (or Expression with resultType integer). + :type max_rows_per_file: object + :param file_name_prefix: Specifies the file name pattern + :code:``_:code:``.:code:`` when copy from non-file + based store without partitionOptions. Type: string (or Expression with resultType string). + :type file_name_prefix: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_rows_per_file': {'key': 'maxRowsPerFile', 'type': 'object'}, + 'file_name_prefix': {'key': 'fileNamePrefix', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + max_rows_per_file: Optional[object] = None, + file_name_prefix: Optional[object] = None, + **kwargs + ): + super(OrcWriteSettings, self).__init__(additional_properties=additional_properties, **kwargs) + self.type = 'OrcWriteSettings' # type: str + self.max_rows_per_file = max_rows_per_file + self.file_name_prefix = file_name_prefix + + class PackageStore(msrest.serialization.Model): """Package store for the SSIS integration runtime. @@ -26619,7 +27461,7 @@ class ParquetDataset(Dataset): :param location: The location of the parquet storage. :type location: ~data_factory_management_client.models.DatasetLocation :param compression_codec: Possible values include: "none", "gzip", "snappy", "lzo", "bzip2", - "deflate", "zipDeflate", "lz4". + "deflate", "zipDeflate", "lz4", "tar", "tarGZip". :type compression_codec: str or ~data_factory_management_client.models.CompressionCodec """ @@ -26729,6 +27571,8 @@ class ParquetSink(CopySink): :type max_concurrent_connections: object :param store_settings: Parquet store settings. 
:type store_settings: ~data_factory_management_client.models.StoreWriteSettings + :param format_settings: Parquet format settings. + :type format_settings: ~data_factory_management_client.models.ParquetWriteSettings """ _validation = { @@ -26744,6 +27588,7 @@ class ParquetSink(CopySink): 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, + 'format_settings': {'key': 'formatSettings', 'type': 'ParquetWriteSettings'}, } def __init__( @@ -26756,11 +27601,13 @@ def __init__( sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, store_settings: Optional["StoreWriteSettings"] = None, + format_settings: Optional["ParquetWriteSettings"] = None, **kwargs ): super(ParquetSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.type = 'ParquetSink' # type: str self.store_settings = store_settings + self.format_settings = format_settings class ParquetSource(CopySource): @@ -26820,6 +27667,50 @@ def __init__( self.additional_columns = additional_columns +class ParquetWriteSettings(FormatWriteSettings): + """Parquet write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The write setting type.Constant filled by server. + :type type: str + :param max_rows_per_file: Limit the written file's row count to be smaller than or equal to the + specified count. Type: integer (or Expression with resultType integer). + :type max_rows_per_file: object + :param file_name_prefix: Specifies the file name pattern + :code:``_:code:``.:code:`` when copy from non-file + based store without partitionOptions. Type: string (or Expression with resultType string). + :type file_name_prefix: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_rows_per_file': {'key': 'maxRowsPerFile', 'type': 'object'}, + 'file_name_prefix': {'key': 'fileNamePrefix', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + max_rows_per_file: Optional[object] = None, + file_name_prefix: Optional[object] = None, + **kwargs + ): + super(ParquetWriteSettings, self).__init__(additional_properties=additional_properties, **kwargs) + self.type = 'ParquetWriteSettings' # type: str + self.max_rows_per_file = max_rows_per_file + self.file_name_prefix = file_name_prefix + + class PaypalLinkedService(LinkedService): """Paypal Service linked service. @@ -29234,13 +30125,9 @@ class RestSink(CopySink): :type http_request_timeout: object :param request_interval: The time to await before sending next request, in milliseconds. :type request_interval: object - :param compression_type: Compression Type to Send data in compressed format with Optimal - Compression Level, Default is None. And The Only Supported option is Gzip. 
- :type compression_type: object - :param wrap_request_json_in_an_object: Wraps Request Array Json into an Object before calling - the rest endpoint , Default is false. ex: if true request content sample format is { rows:[]} - else the format is []. - :type wrap_request_json_in_an_object: object + :param http_compression_type: Http Compression Type to Send data in compressed format with + Optimal Compression Level, Default is None. And The Only Supported option is Gzip. + :type http_compression_type: object """ _validation = { @@ -29259,8 +30146,7 @@ class RestSink(CopySink): 'additional_headers': {'key': 'additionalHeaders', 'type': 'object'}, 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, 'request_interval': {'key': 'requestInterval', 'type': 'object'}, - 'compression_type': {'key': 'compressionType', 'type': 'object'}, - 'wrap_request_json_in_an_object': {'key': 'wrapRequestJsonInAnObject', 'type': 'object'}, + 'http_compression_type': {'key': 'httpCompressionType', 'type': 'object'}, } def __init__( @@ -29276,8 +30162,7 @@ def __init__( additional_headers: Optional[object] = None, http_request_timeout: Optional[object] = None, request_interval: Optional[object] = None, - compression_type: Optional[object] = None, - wrap_request_json_in_an_object: Optional[object] = None, + http_compression_type: Optional[object] = None, **kwargs ): super(RestSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) @@ -29286,8 +30171,7 @@ def __init__( self.additional_headers = additional_headers self.http_request_timeout = http_request_timeout self.request_interval = request_interval - self.compression_type = compression_type - self.wrap_request_json_in_an_object = wrap_request_json_in_an_object + self.http_compression_type = http_compression_type class RestSource(CopySource): @@ -36665,6 +37549,80 @@ def __init__( self.type_conversion_settings = type_conversion_settings +class TarGZipReadSettings(CompressionReadSettings): + """The TarGZip compression read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The Compression setting type.Constant filled by server. + :type type: str + :param preserve_compression_file_name_as_folder: Preserve the compression file name as folder + path. Type: boolean (or Expression with resultType boolean). 
+ :type preserve_compression_file_name_as_folder: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'preserve_compression_file_name_as_folder': {'key': 'preserveCompressionFileNameAsFolder', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + preserve_compression_file_name_as_folder: Optional[object] = None, + **kwargs + ): + super(TarGZipReadSettings, self).__init__(additional_properties=additional_properties, **kwargs) + self.type = 'TarGZipReadSettings' # type: str + self.preserve_compression_file_name_as_folder = preserve_compression_file_name_as_folder + + +class TarReadSettings(CompressionReadSettings): + """The Tar compression read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The Compression setting type.Constant filled by server. + :type type: str + :param preserve_compression_file_name_as_folder: Preserve the compression file name as folder + path. Type: boolean (or Expression with resultType boolean). + :type preserve_compression_file_name_as_folder: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'preserve_compression_file_name_as_folder': {'key': 'preserveCompressionFileNameAsFolder', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + preserve_compression_file_name_as_folder: Optional[object] = None, + **kwargs + ): + super(TarReadSettings, self).__init__(additional_properties=additional_properties, **kwargs) + self.type = 'TarReadSettings' # type: str + self.preserve_compression_file_name_as_folder = preserve_compression_file_name_as_folder + + class TeradataLinkedService(LinkedService): """Linked service for Teradata data source. @@ -37427,7 +38385,7 @@ class TumblingWindowTrigger(Trigger): trigger window that is ready. :type pipeline: ~data_factory_management_client.models.TriggerPipelineReference :param frequency: Required. The frequency of the time windows. Possible values include: - "Minute", "Hour". + "Minute", "Hour", "Month". :type frequency: str or ~data_factory_management_client.models.TumblingWindowFrequency :param interval: Required. The interval of the time windows. The minimum interval allowed is 15 Minutes. 
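Editor's note: among the model additions in this file, the patch also introduces a MongoDB Atlas connector (linked service, collection dataset, and copy source). A minimal sketch of how the three pieces fit together, assuming the vendored models import path and placeholder connection values:

    from azext_datafactory.vendored_sdks.datafactory import models  # import path assumed

    atlas_ls = models.MongoDBAtlasLinkedService(
        connection_string="mongodb+srv://user:<password>@cluster0.example.mongodb.net",  # placeholder
        database="sales",
    )

    atlas_dataset = models.MongoDBAtlasCollectionDataset(
        # LinkedServiceReference construction is assumed; only reference_name is shown here.
        linked_service_name=models.LinkedServiceReference(reference_name="MongoDbAtlasLS"),
        collection="orders",
    )

    atlas_source = models.MongoDBAtlasSource(
        filter='{"status": "shipped"}',   # Mongo query operators passed as a JSON string
        batch_size=1000,
        query_timeout="00:10:00",
    )

The source would then be used on a copy activity against the dataset above, exactly like the existing MongoDB (non-Atlas) connector.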
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_activity_run_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_activity_run_operations.py index 192e09232ad..d149191ff28 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_activity_run_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_activity_run_operations.py @@ -9,7 +9,7 @@ from typing import TYPE_CHECKING import warnings -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpRequest, HttpResponse from azure.mgmt.core.exceptions import ARMErrorFormat @@ -85,12 +85,15 @@ def query_by_pipeline_run( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.ActivityRunsQueryResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) filter_parameters = models.RunFilterParameters(continuation_token=continuation_token_parameter, last_updated_after=last_updated_after, last_updated_before=last_updated_before, filters=filters, order_by=order_by) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.query_by_pipeline_run.metadata['url'] # type: ignore @@ -109,13 +112,12 @@ def query_by_pipeline_run( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(filter_parameters, 'RunFilterParameters') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_data_flow_debug_session_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_data_flow_debug_session_operations.py index 446c117302f..71ec4651c24 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_data_flow_debug_session_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_data_flow_debug_session_operations.py @@ -8,7 +8,7 @@ from typing import TYPE_CHECKING import warnings -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpRequest, HttpResponse @@ -60,12 +60,15 @@ def _create_initial( ): # type: (...) 
-> Optional["models.CreateDataFlowDebugSessionResponse"] cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.CreateDataFlowDebugSessionResponse"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) request = models.CreateDataFlowDebugSessionRequest(compute_type=compute_type, core_count=core_count, time_to_live=time_to_live, name=name, properties=properties) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self._create_initial.metadata['url'] # type: ignore @@ -83,13 +86,12 @@ def _create_initial( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(request, 'CreateDataFlowDebugSessionRequest') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -181,7 +183,13 @@ def get_long_running_output(pipeline_response): return cls(pipeline_response, deserialized, {}) return deserialized - if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + + if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: @@ -214,14 +222,17 @@ def query_by_factory( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.QueryDataFlowDebugSessionsResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL @@ -317,12 +328,15 @@ def add_data_flow( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.AddDataFlowToDebugSessionResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) request = models.DataFlowDebugPackage(session_id=session_id, datasets=datasets, 
linked_services=linked_services, source_settings=source_settings, parameters_debug_settings_parameters=parameters, dataset_parameters=dataset_parameters, folder_path=folder_path, reference_name=reference_name, name=name, properties=properties) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.add_data_flow.metadata['url'] # type: ignore @@ -340,13 +354,12 @@ def add_data_flow( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(request, 'DataFlowDebugPackage') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -384,12 +397,15 @@ def delete( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) request = models.DeleteDataFlowDebugSessionRequest(session_id=session_id) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.delete.metadata['url'] # type: ignore @@ -407,12 +423,12 @@ def delete( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(request, 'DeleteDataFlowDebugSessionRequest') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -436,12 +452,15 @@ def _execute_command_initial( ): # type: (...) 
-> Optional["models.DataFlowDebugCommandResponse"] cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.DataFlowDebugCommandResponse"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) request = models.DataFlowDebugCommandRequest(session_id=session_id, command=command, command_payload=command_payload) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self._execute_command_initial.metadata['url'] # type: ignore @@ -459,13 +478,12 @@ def _execute_command_initial( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(request, 'DataFlowDebugCommandRequest') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -547,7 +565,13 @@ def get_long_running_output(pipeline_response): return cls(pipeline_response, deserialized, {}) return deserialized - if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + + if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_data_flow_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_data_flow_operations.py index e0bd3be1783..09da4545ce0 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_data_flow_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_data_flow_operations.py @@ -8,7 +8,7 @@ from typing import TYPE_CHECKING import warnings -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpRequest, HttpResponse @@ -74,12 +74,15 @@ def create_or_update( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.DataFlowResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } 
error_map.update(kwargs.pop('error_map', {})) data_flow = models.DataFlowResource(properties=properties) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.create_or_update.metadata['url'] # type: ignore @@ -100,13 +103,12 @@ def create_or_update( if if_match is not None: header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(data_flow, 'DataFlowResource') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -148,9 +150,12 @@ def get( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.DataFlowResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.get.metadata['url'] # type: ignore @@ -170,7 +175,7 @@ def get( header_parameters = {} # type: Dict[str, Any] if if_none_match is not None: header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -210,9 +215,12 @@ def delete( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.delete.metadata['url'] # type: ignore @@ -230,6 +238,7 @@ def delete( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -263,14 +272,17 @@ def list_by_factory( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.DataFlowListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not 
next_link: # Construct URL diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_dataset_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_dataset_operations.py index 2f866416c74..8359770b4a3 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_dataset_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_dataset_operations.py @@ -8,7 +8,7 @@ from typing import TYPE_CHECKING import warnings -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpRequest, HttpResponse @@ -64,14 +64,17 @@ def list_by_factory( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.DatasetListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL @@ -146,12 +149,15 @@ def create_or_update( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.DatasetResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) dataset = models.DatasetResource(properties=properties) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.create_or_update.metadata['url'] # type: ignore @@ -172,13 +178,12 @@ def create_or_update( if if_match is not None: header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(dataset, 'DatasetResource') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -220,9 +225,12 @@ def get( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.DatasetResource"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.get.metadata['url'] # type: ignore 
@@ -242,7 +250,7 @@ def get( header_parameters = {} # type: Dict[str, Any] if if_none_match is not None: header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -284,9 +292,12 @@ def delete( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.delete.metadata['url'] # type: ignore @@ -304,6 +315,7 @@ def delete( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_exposure_control_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_exposure_control_operations.py index d2667ffac81..337978f6306 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_exposure_control_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_exposure_control_operations.py @@ -8,7 +8,7 @@ from typing import TYPE_CHECKING import warnings -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpRequest, HttpResponse from azure.mgmt.core.exceptions import ARMErrorFormat @@ -66,12 +66,15 @@ def get_feature_value( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.ExposureControlResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) exposure_control_request = models.ExposureControlRequest(feature_name=feature_name, feature_type=feature_type) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.get_feature_value.metadata['url'] # type: ignore @@ -88,13 +91,12 @@ def get_feature_value( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(exposure_control_request, 'ExposureControlRequest') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, 
**body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -135,12 +137,15 @@ def get_feature_value_by_factory( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.ExposureControlResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) exposure_control_request = models.ExposureControlRequest(feature_name=feature_name, feature_type=feature_type) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.get_feature_value_by_factory.metadata['url'] # type: ignore @@ -158,13 +163,12 @@ def get_feature_value_by_factory( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(exposure_control_request, 'ExposureControlRequest') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -202,12 +206,15 @@ def query_feature_value_by_factory( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.ExposureControlBatchResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) exposure_control_batch_request = models.ExposureControlBatchRequest(exposure_control_requests=exposure_control_requests) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.query_feature_value_by_factory.metadata['url'] # type: ignore @@ -225,13 +232,12 @@ def query_feature_value_by_factory( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(exposure_control_batch_request, 'ExposureControlBatchRequest') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_factory_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_factory_operations.py index 5b8622e97f9..d05f87d2835 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_factory_operations.py +++ 
b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_factory_operations.py @@ -8,7 +8,7 @@ from typing import TYPE_CHECKING import warnings -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpRequest, HttpResponse @@ -18,7 +18,7 @@ if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar + from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -58,14 +58,17 @@ def list( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.FactoryListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL @@ -131,12 +134,15 @@ def configure_factory_repo( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.Factory"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) factory_repo_update = models.FactoryRepoUpdate(factory_resource_id=factory_resource_id, repo_configuration=repo_configuration) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.configure_factory_repo.metadata['url'] # type: ignore @@ -153,13 +159,12 @@ def configure_factory_repo( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(factory_repo_update, 'FactoryRepoUpdate') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -191,14 +196,17 @@ def list_by_resource_group( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.FactoryListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" def 
prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL @@ -253,6 +261,7 @@ def create_or_update( identity=None, # type: Optional["models.FactoryIdentity"] repo_configuration=None, # type: Optional["models.FactoryRepoConfiguration"] global_parameters=None, # type: Optional[Dict[str, "models.GlobalParameterSpecification"]] + public_network_access=None, # type: Optional[Union[str, "models.PublicNetworkAccess"]] **kwargs # type: Any ): # type: (...) -> "models.Factory" @@ -275,18 +284,24 @@ def create_or_update( :type repo_configuration: ~data_factory_management_client.models.FactoryRepoConfiguration :param global_parameters: List of parameters for factory. :type global_parameters: dict[str, ~data_factory_management_client.models.GlobalParameterSpecification] + :param public_network_access: Whether or not public network access is allowed for the data + factory. + :type public_network_access: str or ~data_factory_management_client.models.PublicNetworkAccess :keyword callable cls: A custom type or function that will be passed the direct response :return: Factory, or the result of cls(response) :rtype: ~data_factory_management_client.models.Factory :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.Factory"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) - factory = models.Factory(location=location, tags=tags, identity=identity, repo_configuration=repo_configuration, global_parameters=global_parameters) + factory = models.Factory(location=location, tags=tags, identity=identity, repo_configuration=repo_configuration, global_parameters=global_parameters, public_network_access=public_network_access) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.create_or_update.metadata['url'] # type: ignore @@ -306,13 +321,12 @@ def create_or_update( if if_match is not None: header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(factory, 'Factory') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -353,12 +367,15 @@ def update( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.Factory"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) factory_update_parameters = models.FactoryUpdateParameters(tags=tags, identity=identity) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + 
accept = "application/json" # Construct URL url = self.update.metadata['url'] # type: ignore @@ -376,13 +393,12 @@ def update( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(factory_update_parameters, 'FactoryUpdateParameters') body_content_kwargs['content'] = body_content request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -421,9 +437,12 @@ def get( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.Factory"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.get.metadata['url'] # type: ignore @@ -442,7 +461,7 @@ def get( header_parameters = {} # type: Dict[str, Any] if if_none_match is not None: header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -481,9 +500,12 @@ def delete( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.delete.metadata['url'] # type: ignore @@ -500,6 +522,7 @@ def delete( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -542,12 +565,15 @@ def get_git_hub_access_token( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.GitHubAccessTokenResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) git_hub_access_token_request = models.GitHubAccessTokenRequest(git_hub_access_code=git_hub_access_code, git_hub_client_id=git_hub_client_id, git_hub_access_token_base_url=git_hub_access_token_base_url) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.get_git_hub_access_token.metadata['url'] # type: ignore @@ -565,13 +591,12 @@ def get_git_hub_access_token( # Construct headers header_parameters = {} # type: Dict[str, Any] 
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(git_hub_access_token_request, 'GitHubAccessTokenRequest') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -625,12 +650,15 @@ def get_data_plane_access( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.AccessPolicyResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) policy = models.UserAccessPolicy(permissions=permissions, access_resource_path=access_resource_path, profile_name=profile_name, start_time=start_time, expire_time=expire_time) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.get_data_plane_access.metadata['url'] # type: ignore @@ -648,13 +676,12 @@ def get_data_plane_access( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(policy, 'UserAccessPolicy') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_node_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_node_operations.py index a7903633080..f0c07ea708b 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_node_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_node_operations.py @@ -8,7 +8,7 @@ from typing import TYPE_CHECKING import warnings -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpRequest, HttpResponse from azure.mgmt.core.exceptions import ARMErrorFormat @@ -69,9 +69,12 @@ def get( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.SelfHostedIntegrationRuntimeNode"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = 
"application/json" # Construct URL url = self.get.metadata['url'] # type: ignore @@ -90,7 +93,7 @@ def get( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -133,9 +136,12 @@ def delete( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.delete.metadata['url'] # type: ignore @@ -154,6 +160,7 @@ def delete( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -197,12 +204,15 @@ def update( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.SelfHostedIntegrationRuntimeNode"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) update_integration_runtime_node_request = models.UpdateIntegrationRuntimeNodeRequest(concurrent_jobs_limit=concurrent_jobs_limit) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.update.metadata['url'] # type: ignore @@ -222,13 +232,12 @@ def update( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(update_integration_runtime_node_request, 'UpdateIntegrationRuntimeNodeRequest') body_content_kwargs['content'] = body_content request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -269,9 +278,12 @@ def get_ip_address( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeNodeIpAddress"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.get_ip_address.metadata['url'] # type: ignore @@ -290,7 +302,7 @@ def get_ip_address( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, 
query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_object_metadata_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_object_metadata_operations.py index 461ab7b6539..6fa8dd0ea7c 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_object_metadata_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_object_metadata_operations.py @@ -8,7 +8,7 @@ from typing import TYPE_CHECKING import warnings -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpRequest, HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod @@ -55,9 +55,12 @@ def _refresh_initial( ): # type: (...) -> Optional["models.SsisObjectMetadataStatusResponse"] cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.SsisObjectMetadataStatusResponse"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self._refresh_initial.metadata['url'] # type: ignore @@ -75,7 +78,7 @@ def _refresh_initial( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -147,7 +150,14 @@ def get_long_running_output(pipeline_response): return cls(pipeline_response, deserialized, {}) return deserialized - if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + + if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: @@ -187,12 +197,15 @@ def get( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.SsisObjectMetadataListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', 
{})) get_metadata_request = models.GetSsisObjectMetadataRequest(metadata_path=metadata_path) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.get.metadata['url'] # type: ignore @@ -211,7 +224,7 @@ def get( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] if get_metadata_request is not None: @@ -220,7 +233,6 @@ def get( body_content = None body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_operations.py index 1fb5fc6b30d..1f8f3fb8af4 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_operations.py @@ -8,7 +8,7 @@ from typing import TYPE_CHECKING import warnings -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpRequest, HttpResponse @@ -66,14 +66,17 @@ def list_by_factory( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL @@ -148,12 +151,15 @@ def create_or_update( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) integration_runtime = models.IntegrationRuntimeResource(properties=properties) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.create_or_update.metadata['url'] # type: ignore @@ -174,13 +180,12 @@ def create_or_update( if if_match is not None: header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') header_parameters['Content-Type'] = 
self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(integration_runtime, 'IntegrationRuntimeResource') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -223,9 +228,12 @@ def get( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.IntegrationRuntimeResource"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.get.metadata['url'] # type: ignore @@ -245,7 +253,7 @@ def get( header_parameters = {} # type: Dict[str, Any] if if_none_match is not None: header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -295,12 +303,15 @@ def update( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) update_integration_runtime_request = models.UpdateIntegrationRuntimeRequest(auto_update=auto_update, update_delay_offset=update_delay_offset) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.update.metadata['url'] # type: ignore @@ -319,13 +330,12 @@ def update( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(update_integration_runtime_request, 'UpdateIntegrationRuntimeRequest') body_content_kwargs['content'] = body_content request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -363,9 +373,12 @@ def delete( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.delete.metadata['url'] # type: ignore @@ -383,6 
+396,7 @@ def delete( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -419,9 +433,12 @@ def get_status( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeStatusResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.get_status.metadata['url'] # type: ignore @@ -439,7 +456,7 @@ def get_status( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -480,9 +497,12 @@ def get_connection_info( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeConnectionInfo"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.get_connection_info.metadata['url'] # type: ignore @@ -500,7 +520,7 @@ def get_connection_info( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -543,12 +563,15 @@ def regenerate_auth_key( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeAuthKeys"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) regenerate_key_parameters = models.IntegrationRuntimeRegenerateKeyParameters(key_name=key_name) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.regenerate_auth_key.metadata['url'] # type: ignore @@ -567,13 +590,12 @@ def regenerate_auth_key( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(regenerate_key_parameters, 'IntegrationRuntimeRegenerateKeyParameters') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = 
self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -611,9 +633,12 @@ def list_auth_key( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeAuthKeys"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.list_auth_key.metadata['url'] # type: ignore @@ -631,7 +656,7 @@ def list_auth_key( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -658,9 +683,12 @@ def _start_initial( ): # type: (...) -> Optional["models.IntegrationRuntimeStatusResponse"] cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.IntegrationRuntimeStatusResponse"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self._start_initial.metadata['url'] # type: ignore @@ -678,7 +706,7 @@ def _start_initial( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -750,7 +778,14 @@ def get_long_running_output(pipeline_response): return cls(pipeline_response, deserialized, {}) return deserialized - if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + + if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: @@ -773,9 +808,12 @@ def _stop_initial( ): # type: (...) 
-> None cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self._stop_initial.metadata['url'] # type: ignore @@ -793,6 +831,7 @@ def _stop_initial( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -856,7 +895,14 @@ def get_long_running_output(pipeline_response): if cls: return cls(pipeline_response, None, {}) - if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + + if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: @@ -895,9 +941,12 @@ def sync_credentials( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.sync_credentials.metadata['url'] # type: ignore @@ -915,6 +964,7 @@ def sync_credentials( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -952,9 +1002,12 @@ def get_monitoring_data( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeMonitoringData"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.get_monitoring_data.metadata['url'] # type: ignore @@ -972,7 +1025,7 @@ def get_monitoring_data( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, 
**kwargs) @@ -1012,9 +1065,12 @@ def upgrade( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.upgrade.metadata['url'] # type: ignore @@ -1032,6 +1088,7 @@ def upgrade( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -1072,12 +1129,15 @@ def remove_link( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) linked_integration_runtime_request = models.LinkedIntegrationRuntimeRequest(linked_factory_name=linked_factory_name) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.remove_link.metadata['url'] # type: ignore @@ -1096,12 +1156,12 @@ def remove_link( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(linked_integration_runtime_request, 'LinkedIntegrationRuntimeRequest') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -1151,12 +1211,15 @@ def create_linked_integration_runtime( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeStatusResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) create_linked_integration_runtime_request = models.CreateLinkedIntegrationRuntimeRequest(name=name, subscription_id=subscription_id, data_factory_name=data_factory_name, data_factory_location=data_factory_location) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.create_linked_integration_runtime.metadata['url'] # type: ignore @@ -1175,13 +1238,12 @@ def create_linked_integration_runtime( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(create_linked_integration_runtime_request, 
'CreateLinkedIntegrationRuntimeRequest') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_linked_service_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_linked_service_operations.py index 7124cb588eb..7320d56e268 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_linked_service_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_linked_service_operations.py @@ -8,7 +8,7 @@ from typing import TYPE_CHECKING import warnings -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpRequest, HttpResponse @@ -64,14 +64,17 @@ def list_by_factory( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.LinkedServiceListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL @@ -146,12 +149,15 @@ def create_or_update( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.LinkedServiceResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) linked_service = models.LinkedServiceResource(properties=properties) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.create_or_update.metadata['url'] # type: ignore @@ -172,13 +178,12 @@ def create_or_update( if if_match is not None: header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(linked_service, 'LinkedServiceResource') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -221,9 +226,12 @@ def get( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: 
ClsType[Optional["models.LinkedServiceResource"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.get.metadata['url'] # type: ignore @@ -243,7 +251,7 @@ def get( header_parameters = {} # type: Dict[str, Any] if if_none_match is not None: header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -285,9 +293,12 @@ def delete( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.delete.metadata['url'] # type: ignore @@ -305,6 +316,7 @@ def delete( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_managed_private_endpoint_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_managed_private_endpoint_operations.py index 29be0bd0e6d..a08594ceeb4 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_managed_private_endpoint_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_managed_private_endpoint_operations.py @@ -8,7 +8,7 @@ from typing import TYPE_CHECKING import warnings -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpRequest, HttpResponse @@ -67,14 +67,17 @@ def list_by_factory( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedPrivateEndpointListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL @@ -163,12 +166,15 @@ def create_or_update( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: 
ClsType["models.ManagedPrivateEndpointResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) managed_private_endpoint = models.ManagedPrivateEndpointResource(connection_state=connection_state, fqdns=fqdns, group_id=group_id, private_link_resource_id=private_link_resource_id) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.create_or_update.metadata['url'] # type: ignore @@ -190,13 +196,12 @@ def create_or_update( if if_match is not None: header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(managed_private_endpoint, 'ManagedPrivateEndpointResource') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -242,9 +247,12 @@ def get( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedPrivateEndpointResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.get.metadata['url'] # type: ignore @@ -265,7 +273,7 @@ def get( header_parameters = {} # type: Dict[str, Any] if if_none_match is not None: header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -308,9 +316,12 @@ def delete( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.delete.metadata['url'] # type: ignore @@ -329,6 +340,7 @@ def delete( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_managed_virtual_network_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_managed_virtual_network_operations.py index fa043ca3e59..48ea82af8ee 100644 --- 
a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_managed_virtual_network_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_managed_virtual_network_operations.py @@ -8,7 +8,7 @@ from typing import TYPE_CHECKING import warnings -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpRequest, HttpResponse @@ -64,14 +64,17 @@ def list_by_factory( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedVirtualNetworkListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL @@ -146,12 +149,15 @@ def create_or_update( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedVirtualNetworkResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) managed_virtual_network = models.ManagedVirtualNetworkResource(properties=properties) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.create_or_update.metadata['url'] # type: ignore @@ -172,13 +178,12 @@ def create_or_update( if if_match is not None: header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(managed_virtual_network, 'ManagedVirtualNetworkResource') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -221,9 +226,12 @@ def get( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedVirtualNetworkResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.get.metadata['url'] # type: ignore @@ -243,7 +251,7 @@ def get( header_parameters = {} # type: Dict[str, Any] if if_none_match is not None: header_parameters['If-None-Match'] = 
self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_operation_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_operation_operations.py index c5cf3d43f6d..76d988e7417 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_operation_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_operation_operations.py @@ -8,7 +8,7 @@ from typing import TYPE_CHECKING import warnings -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpRequest, HttpResponse @@ -58,14 +58,17 @@ def list( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.OperationListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_pipeline_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_pipeline_operations.py index d82f423f2cb..62abb5891df 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_pipeline_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_pipeline_operations.py @@ -8,7 +8,7 @@ from typing import TYPE_CHECKING import warnings -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpRequest, HttpResponse @@ -64,14 +64,17 @@ def list_by_factory( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 
'str') if not next_link: # Construct URL @@ -146,10 +149,13 @@ def create_or_update( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.create_or_update.metadata['url'] # type: ignore @@ -170,13 +176,12 @@ def create_or_update( if if_match is not None: header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(pipeline, 'PipelineResource') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -218,9 +223,12 @@ def get( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.PipelineResource"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.get.metadata['url'] # type: ignore @@ -240,7 +248,7 @@ def get( header_parameters = {} # type: Dict[str, Any] if if_none_match is not None: header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -282,9 +290,12 @@ def delete( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.delete.metadata['url'] # type: ignore @@ -302,6 +313,7 @@ def delete( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -358,10 +370,13 @@ def create_run( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.CreateRunResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } 
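# Illustrative sketch (not part of the diff above): how the expanded error_map
# shown here is consumed. The generated operations pass this dict to
# azure-core's map_error(), which raises the mapped exception type when the
# response status code matches, so adding 401 -> ClientAuthenticationError
# surfaces authentication failures as a distinct, catchable error instead of a
# generic HttpResponseError. The status code and the None response below are
# placeholder values for demonstration only.
from azure.core.exceptions import (
    ClientAuthenticationError,
    ResourceExistsError,
    ResourceNotFoundError,
    map_error,
)

error_map = {
    401: ClientAuthenticationError,
    404: ResourceNotFoundError,
    409: ResourceExistsError,
}

try:
    # A mapped status raises immediately; unmapped codes simply fall through.
    map_error(status_code=401, response=None, error_map=error_map)
except ClientAuthenticationError as exc:
    print("401 mapped to", type(exc).__name__)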
error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.create_run.metadata['url'] # type: ignore @@ -388,7 +403,7 @@ def create_run( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] if parameters is not None: @@ -397,7 +412,6 @@ def create_run( body_content = None body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_pipeline_run_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_pipeline_run_operations.py index 75634fde5ac..756a14fb6c0 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_pipeline_run_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_pipeline_run_operations.py @@ -9,7 +9,7 @@ from typing import TYPE_CHECKING import warnings -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpRequest, HttpResponse from azure.mgmt.core.exceptions import ARMErrorFormat @@ -82,12 +82,15 @@ def query_by_factory( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineRunsQueryResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) filter_parameters = models.RunFilterParameters(continuation_token=continuation_token_parameter, last_updated_after=last_updated_after, last_updated_before=last_updated_before, filters=filters, order_by=order_by) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.query_by_factory.metadata['url'] # type: ignore @@ -105,13 +108,12 @@ def query_by_factory( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(filter_parameters, 'RunFilterParameters') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -149,9 +151,12 @@ def get( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: 
ClsType["models.PipelineRun"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.get.metadata['url'] # type: ignore @@ -169,7 +174,7 @@ def get( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -213,9 +218,12 @@ def cancel( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.cancel.metadata['url'] # type: ignore @@ -235,6 +243,7 @@ def cancel( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_trigger_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_trigger_operations.py index 142f32f2c31..d4ce8906015 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_trigger_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_trigger_operations.py @@ -8,7 +8,7 @@ from typing import TYPE_CHECKING import warnings -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpRequest, HttpResponse @@ -66,14 +66,17 @@ def list_by_factory( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL @@ -146,12 +149,15 @@ def query_by_factory( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerQueryResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } 
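# Illustrative sketch (not part of the diff above): the long-running-operation
# change that recurs in the trigger and integration-runtime begin_* methods in
# this diff. Instead of ARMPolling(lro_delay, **kwargs), the generated code now
# builds a path_format_arguments dict from the serialized URL parameters and
# hands it to the poller, which (in the azure-core version this generated code
# targets) uses it to resolve relative or templated polling URLs returned by
# the service. All values below are placeholders for demonstration only.
from azure.mgmt.core.polling.arm_polling import ARMPolling

lro_delay = 30  # seconds between polling attempts
path_format_arguments = {
    'subscriptionId': '00000000-0000-0000-0000-000000000000',
    'resourceGroupName': 'exampleResourceGroup',
    'factoryName': 'exampleFactoryName',
    'triggerName': 'exampleTrigger',
}
polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments)
print(type(polling_method).__name__)  # ARMPolling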
error_map.update(kwargs.pop('error_map', {})) filter_parameters = models.TriggerFilterParameters(continuation_token=continuation_token_parameter, parent_trigger_name=parent_trigger_name) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.query_by_factory.metadata['url'] # type: ignore @@ -169,13 +175,12 @@ def query_by_factory( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(filter_parameters, 'TriggerFilterParameters') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -220,12 +225,15 @@ def create_or_update( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) trigger = models.TriggerResource(properties=properties) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.create_or_update.metadata['url'] # type: ignore @@ -246,13 +254,12 @@ def create_or_update( if if_match is not None: header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(trigger, 'TriggerResource') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -294,9 +301,12 @@ def get( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.TriggerResource"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.get.metadata['url'] # type: ignore @@ -316,7 +326,7 @@ def get( header_parameters = {} # type: Dict[str, Any] if if_none_match is not None: header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -358,9 +368,12 @@ def delete( :raises: 
~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.delete.metadata['url'] # type: ignore @@ -378,6 +391,7 @@ def delete( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -401,9 +415,12 @@ def _subscribe_to_event_initial( ): # type: (...) -> Optional["models.TriggerSubscriptionOperationStatus"] cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.TriggerSubscriptionOperationStatus"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self._subscribe_to_event_initial.metadata['url'] # type: ignore @@ -421,7 +438,7 @@ def _subscribe_to_event_initial( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -493,7 +510,14 @@ def get_long_running_output(pipeline_response): return cls(pipeline_response, deserialized, {}) return deserialized - if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + + if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: @@ -529,9 +553,12 @@ def get_event_subscription_status( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerSubscriptionOperationStatus"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.get_event_subscription_status.metadata['url'] # type: ignore @@ -549,7 +576,7 @@ def get_event_subscription_status( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = 
self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -576,9 +603,12 @@ def _unsubscribe_from_event_initial( ): # type: (...) -> Optional["models.TriggerSubscriptionOperationStatus"] cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.TriggerSubscriptionOperationStatus"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self._unsubscribe_from_event_initial.metadata['url'] # type: ignore @@ -596,7 +626,7 @@ def _unsubscribe_from_event_initial( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -668,7 +698,14 @@ def get_long_running_output(pipeline_response): return cls(pipeline_response, deserialized, {}) return deserialized - if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + + if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: @@ -691,9 +728,12 @@ def _start_initial( ): # type: (...) 
-> None cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self._start_initial.metadata['url'] # type: ignore @@ -711,6 +751,7 @@ def _start_initial( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -774,7 +815,14 @@ def get_long_running_output(pipeline_response): if cls: return cls(pipeline_response, None, {}) - if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + + if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: @@ -797,9 +845,12 @@ def _stop_initial( ): # type: (...) 
-> None cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self._stop_initial.metadata['url'] # type: ignore @@ -817,6 +868,7 @@ def _stop_initial( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -880,7 +932,14 @@ def get_long_running_output(pipeline_response): if cls: return cls(pipeline_response, None, {}) - if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + + if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_trigger_run_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_trigger_run_operations.py index 3290d8196ab..0e89afba5a3 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_trigger_run_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_trigger_run_operations.py @@ -9,7 +9,7 @@ from typing import TYPE_CHECKING import warnings -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpRequest, HttpResponse from azure.mgmt.core.exceptions import ARMErrorFormat @@ -70,9 +70,12 @@ def rerun( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.rerun.metadata['url'] # type: ignore @@ -91,6 +94,7 @@ def rerun( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -130,9 +134,12 @@ def cancel( :raises: 
~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.cancel.metadata['url'] # type: ignore @@ -151,6 +158,7 @@ def cancel( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -202,12 +210,15 @@ def query_by_factory( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerRunsQueryResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) filter_parameters = models.RunFilterParameters(continuation_token=continuation_token_parameter, last_updated_after=last_updated_after, last_updated_before=last_updated_before, filters=filters, order_by=order_by) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.query_by_factory.metadata['url'] # type: ignore @@ -225,13 +236,12 @@ def query_by_factory( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(filter_parameters, 'RunFilterParameters') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response diff --git a/src/datafactory/report.md b/src/datafactory/report.md index fb50389d775..af93d925485 100644 --- a/src/datafactory/report.md +++ b/src/datafactory/report.md @@ -1,20 +1,143 @@ # Azure CLI Module Creation Report -### datafactory activity-run query-by-pipeline-run - -query-by-pipeline-run a datafactory activity-run. 
- -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory activity-run|ActivityRuns| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|query-by-pipeline-run|QueryByPipelineRun| - -#### Parameters +## EXTENSION +|CLI Extension|Command Groups| +|---------|------------| +|az datafactory|[groups](#CommandGroups) + +## GROUPS +### Command groups in `az datafactory` extension +|CLI Command Group|Group Swagger name|Commands| +|---------|------------|--------| +|az datafactory factory|Factories|[commands](#CommandsInFactories)| +|az datafactory integration-runtime|IntegrationRuntimes|[commands](#CommandsInIntegrationRuntimes)| +|az datafactory integration-runtime-node|IntegrationRuntimeNodes|[commands](#CommandsInIntegrationRuntimeNodes)| +|az datafactory linked-service|LinkedServices|[commands](#CommandsInLinkedServices)| +|az datafactory dataset|Datasets|[commands](#CommandsInDatasets)| +|az datafactory pipeline|Pipelines|[commands](#CommandsInPipelines)| +|az datafactory pipeline-run|PipelineRuns|[commands](#CommandsInPipelineRuns)| +|az datafactory activity-run|ActivityRuns|[commands](#CommandsInActivityRuns)| +|az datafactory trigger|Triggers|[commands](#CommandsInTriggers)| +|az datafactory trigger-run|TriggerRuns|[commands](#CommandsInTriggerRuns)| + +## COMMANDS +### Commands in `az datafactory activity-run` group +|CLI Command|Operation Swagger name|Parameters|Examples| +|---------|------------|--------|-----------| +|[az datafactory activity-run query-by-pipeline-run](#ActivityRunsQueryByPipelineRun)|QueryByPipelineRun|[Parameters](#ParametersActivityRunsQueryByPipelineRun)|[Example](#ExamplesActivityRunsQueryByPipelineRun)| + +### Commands in `az datafactory dataset` group +|CLI Command|Operation Swagger name|Parameters|Examples| +|---------|------------|--------|-----------| +|[az datafactory dataset list](#DatasetsListByFactory)|ListByFactory|[Parameters](#ParametersDatasetsListByFactory)|[Example](#ExamplesDatasetsListByFactory)| +|[az datafactory dataset show](#DatasetsGet)|Get|[Parameters](#ParametersDatasetsGet)|[Example](#ExamplesDatasetsGet)| +|[az datafactory dataset create](#DatasetsCreateOrUpdate#Create)|CreateOrUpdate#Create|[Parameters](#ParametersDatasetsCreateOrUpdate#Create)|[Example](#ExamplesDatasetsCreateOrUpdate#Create)| +|[az datafactory dataset update](#DatasetsCreateOrUpdate#Update)|CreateOrUpdate#Update|[Parameters](#ParametersDatasetsCreateOrUpdate#Update)|[Example](#ExamplesDatasetsCreateOrUpdate#Update)| +|[az datafactory dataset delete](#DatasetsDelete)|Delete|[Parameters](#ParametersDatasetsDelete)|[Example](#ExamplesDatasetsDelete)| + +### Commands in `az datafactory factory` group +|CLI Command|Operation Swagger name|Parameters|Examples| +|---------|------------|--------|-----------| +|[az datafactory factory list](#FactoriesListByResourceGroup)|ListByResourceGroup|[Parameters](#ParametersFactoriesListByResourceGroup)|[Example](#ExamplesFactoriesListByResourceGroup)| +|[az datafactory factory list](#FactoriesList)|List|[Parameters](#ParametersFactoriesList)|[Example](#ExamplesFactoriesList)| +|[az datafactory factory show](#FactoriesGet)|Get|[Parameters](#ParametersFactoriesGet)|[Example](#ExamplesFactoriesGet)| +|[az datafactory factory create](#FactoriesCreateOrUpdate#Create)|CreateOrUpdate#Create|[Parameters](#ParametersFactoriesCreateOrUpdate#Create)|[Example](#ExamplesFactoriesCreateOrUpdate#Create)| +|[az datafactory factory 
update](#FactoriesUpdate)|Update|[Parameters](#ParametersFactoriesUpdate)|[Example](#ExamplesFactoriesUpdate)| +|[az datafactory factory delete](#FactoriesDelete)|Delete|[Parameters](#ParametersFactoriesDelete)|[Example](#ExamplesFactoriesDelete)| +|[az datafactory factory configure-factory-repo](#FactoriesConfigureFactoryRepo)|ConfigureFactoryRepo|[Parameters](#ParametersFactoriesConfigureFactoryRepo)|[Example](#ExamplesFactoriesConfigureFactoryRepo)| +|[az datafactory factory get-data-plane-access](#FactoriesGetDataPlaneAccess)|GetDataPlaneAccess|[Parameters](#ParametersFactoriesGetDataPlaneAccess)|[Example](#ExamplesFactoriesGetDataPlaneAccess)| +|[az datafactory factory get-git-hub-access-token](#FactoriesGetGitHubAccessToken)|GetGitHubAccessToken|[Parameters](#ParametersFactoriesGetGitHubAccessToken)|[Example](#ExamplesFactoriesGetGitHubAccessToken)| + +### Commands in `az datafactory integration-runtime` group +|CLI Command|Operation Swagger name|Parameters|Examples| +|---------|------------|--------|-----------| +|[az datafactory integration-runtime list](#IntegrationRuntimesListByFactory)|ListByFactory|[Parameters](#ParametersIntegrationRuntimesListByFactory)|[Example](#ExamplesIntegrationRuntimesListByFactory)| +|[az datafactory integration-runtime show](#IntegrationRuntimesGet)|Get|[Parameters](#ParametersIntegrationRuntimesGet)|[Example](#ExamplesIntegrationRuntimesGet)| +|[az datafactory integration-runtime linked-integration-runtime create](#IntegrationRuntimesCreateLinkedIntegrationRuntime)|CreateLinkedIntegrationRuntime|[Parameters](#ParametersIntegrationRuntimesCreateLinkedIntegrationRuntime)|[Example](#ExamplesIntegrationRuntimesCreateLinkedIntegrationRuntime)| +|[az datafactory integration-runtime managed create](#IntegrationRuntimesCreateOrUpdate#Create#Managed)|CreateOrUpdate#Create#Managed|[Parameters](#ParametersIntegrationRuntimesCreateOrUpdate#Create#Managed)|Not Found| +|[az datafactory integration-runtime self-hosted create](#IntegrationRuntimesCreateOrUpdate#Create#SelfHosted)|CreateOrUpdate#Create#SelfHosted|[Parameters](#ParametersIntegrationRuntimesCreateOrUpdate#Create#SelfHosted)|[Example](#ExamplesIntegrationRuntimesCreateOrUpdate#Create#SelfHosted)| +|[az datafactory integration-runtime update](#IntegrationRuntimesUpdate)|Update|[Parameters](#ParametersIntegrationRuntimesUpdate)|[Example](#ExamplesIntegrationRuntimesUpdate)| +|[az datafactory integration-runtime delete](#IntegrationRuntimesDelete)|Delete|[Parameters](#ParametersIntegrationRuntimesDelete)|[Example](#ExamplesIntegrationRuntimesDelete)| +|[az datafactory integration-runtime get-connection-info](#IntegrationRuntimesGetConnectionInfo)|GetConnectionInfo|[Parameters](#ParametersIntegrationRuntimesGetConnectionInfo)|[Example](#ExamplesIntegrationRuntimesGetConnectionInfo)| +|[az datafactory integration-runtime get-monitoring-data](#IntegrationRuntimesGetMonitoringData)|GetMonitoringData|[Parameters](#ParametersIntegrationRuntimesGetMonitoringData)|[Example](#ExamplesIntegrationRuntimesGetMonitoringData)| +|[az datafactory integration-runtime get-status](#IntegrationRuntimesGetStatus)|GetStatus|[Parameters](#ParametersIntegrationRuntimesGetStatus)|[Example](#ExamplesIntegrationRuntimesGetStatus)| +|[az datafactory integration-runtime list-auth-key](#IntegrationRuntimesListAuthKeys)|ListAuthKeys|[Parameters](#ParametersIntegrationRuntimesListAuthKeys)|[Example](#ExamplesIntegrationRuntimesListAuthKeys)| +|[az datafactory integration-runtime 
regenerate-auth-key](#IntegrationRuntimesRegenerateAuthKey)|RegenerateAuthKey|[Parameters](#ParametersIntegrationRuntimesRegenerateAuthKey)|[Example](#ExamplesIntegrationRuntimesRegenerateAuthKey)| +|[az datafactory integration-runtime remove-link](#IntegrationRuntimesRemoveLinks)|RemoveLinks|[Parameters](#ParametersIntegrationRuntimesRemoveLinks)|[Example](#ExamplesIntegrationRuntimesRemoveLinks)| +|[az datafactory integration-runtime start](#IntegrationRuntimesStart)|Start|[Parameters](#ParametersIntegrationRuntimesStart)|[Example](#ExamplesIntegrationRuntimesStart)| +|[az datafactory integration-runtime stop](#IntegrationRuntimesStop)|Stop|[Parameters](#ParametersIntegrationRuntimesStop)|[Example](#ExamplesIntegrationRuntimesStop)| +|[az datafactory integration-runtime sync-credentials](#IntegrationRuntimesSyncCredentials)|SyncCredentials|[Parameters](#ParametersIntegrationRuntimesSyncCredentials)|[Example](#ExamplesIntegrationRuntimesSyncCredentials)| +|[az datafactory integration-runtime upgrade](#IntegrationRuntimesUpgrade)|Upgrade|[Parameters](#ParametersIntegrationRuntimesUpgrade)|[Example](#ExamplesIntegrationRuntimesUpgrade)| + +### Commands in `az datafactory integration-runtime-node` group +|CLI Command|Operation Swagger name|Parameters|Examples| +|---------|------------|--------|-----------| +|[az datafactory integration-runtime-node show](#IntegrationRuntimeNodesGet)|Get|[Parameters](#ParametersIntegrationRuntimeNodesGet)|[Example](#ExamplesIntegrationRuntimeNodesGet)| +|[az datafactory integration-runtime-node update](#IntegrationRuntimeNodesUpdate)|Update|[Parameters](#ParametersIntegrationRuntimeNodesUpdate)|[Example](#ExamplesIntegrationRuntimeNodesUpdate)| +|[az datafactory integration-runtime-node delete](#IntegrationRuntimeNodesDelete)|Delete|[Parameters](#ParametersIntegrationRuntimeNodesDelete)|[Example](#ExamplesIntegrationRuntimeNodesDelete)| +|[az datafactory integration-runtime-node get-ip-address](#IntegrationRuntimeNodesGetIpAddress)|GetIpAddress|[Parameters](#ParametersIntegrationRuntimeNodesGetIpAddress)|[Example](#ExamplesIntegrationRuntimeNodesGetIpAddress)| + +### Commands in `az datafactory linked-service` group +|CLI Command|Operation Swagger name|Parameters|Examples| +|---------|------------|--------|-----------| +|[az datafactory linked-service list](#LinkedServicesListByFactory)|ListByFactory|[Parameters](#ParametersLinkedServicesListByFactory)|[Example](#ExamplesLinkedServicesListByFactory)| +|[az datafactory linked-service show](#LinkedServicesGet)|Get|[Parameters](#ParametersLinkedServicesGet)|[Example](#ExamplesLinkedServicesGet)| +|[az datafactory linked-service create](#LinkedServicesCreateOrUpdate#Create)|CreateOrUpdate#Create|[Parameters](#ParametersLinkedServicesCreateOrUpdate#Create)|[Example](#ExamplesLinkedServicesCreateOrUpdate#Create)| +|[az datafactory linked-service update](#LinkedServicesCreateOrUpdate#Update)|CreateOrUpdate#Update|[Parameters](#ParametersLinkedServicesCreateOrUpdate#Update)|[Example](#ExamplesLinkedServicesCreateOrUpdate#Update)| +|[az datafactory linked-service delete](#LinkedServicesDelete)|Delete|[Parameters](#ParametersLinkedServicesDelete)|[Example](#ExamplesLinkedServicesDelete)| + +### Commands in `az datafactory pipeline` group +|CLI Command|Operation Swagger name|Parameters|Examples| +|---------|------------|--------|-----------| +|[az datafactory pipeline list](#PipelinesListByFactory)|ListByFactory|[Parameters](#ParametersPipelinesListByFactory)|[Example](#ExamplesPipelinesListByFactory)| +|[az datafactory 
pipeline show](#PipelinesGet)|Get|[Parameters](#ParametersPipelinesGet)|[Example](#ExamplesPipelinesGet)| +|[az datafactory pipeline create](#PipelinesCreateOrUpdate#Create)|CreateOrUpdate#Create|[Parameters](#ParametersPipelinesCreateOrUpdate#Create)|[Example](#ExamplesPipelinesCreateOrUpdate#Create)| +|[az datafactory pipeline update](#PipelinesCreateOrUpdate#Update)|CreateOrUpdate#Update|[Parameters](#ParametersPipelinesCreateOrUpdate#Update)|[Example](#ExamplesPipelinesCreateOrUpdate#Update)| +|[az datafactory pipeline delete](#PipelinesDelete)|Delete|[Parameters](#ParametersPipelinesDelete)|[Example](#ExamplesPipelinesDelete)| +|[az datafactory pipeline create-run](#PipelinesCreateRun)|CreateRun|[Parameters](#ParametersPipelinesCreateRun)|[Example](#ExamplesPipelinesCreateRun)| + +### Commands in `az datafactory pipeline-run` group +|CLI Command|Operation Swagger name|Parameters|Examples| +|---------|------------|--------|-----------| +|[az datafactory pipeline-run show](#PipelineRunsGet)|Get|[Parameters](#ParametersPipelineRunsGet)|[Example](#ExamplesPipelineRunsGet)| +|[az datafactory pipeline-run cancel](#PipelineRunsCancel)|Cancel|[Parameters](#ParametersPipelineRunsCancel)|[Example](#ExamplesPipelineRunsCancel)| +|[az datafactory pipeline-run query-by-factory](#PipelineRunsQueryByFactory)|QueryByFactory|[Parameters](#ParametersPipelineRunsQueryByFactory)|[Example](#ExamplesPipelineRunsQueryByFactory)| + +### Commands in `az datafactory trigger` group +|CLI Command|Operation Swagger name|Parameters|Examples| +|---------|------------|--------|-----------| +|[az datafactory trigger list](#TriggersListByFactory)|ListByFactory|[Parameters](#ParametersTriggersListByFactory)|[Example](#ExamplesTriggersListByFactory)| +|[az datafactory trigger show](#TriggersGet)|Get|[Parameters](#ParametersTriggersGet)|[Example](#ExamplesTriggersGet)| +|[az datafactory trigger create](#TriggersCreateOrUpdate#Create)|CreateOrUpdate#Create|[Parameters](#ParametersTriggersCreateOrUpdate#Create)|[Example](#ExamplesTriggersCreateOrUpdate#Create)| +|[az datafactory trigger update](#TriggersCreateOrUpdate#Update)|CreateOrUpdate#Update|[Parameters](#ParametersTriggersCreateOrUpdate#Update)|[Example](#ExamplesTriggersCreateOrUpdate#Update)| +|[az datafactory trigger delete](#TriggersDelete)|Delete|[Parameters](#ParametersTriggersDelete)|[Example](#ExamplesTriggersDelete)| +|[az datafactory trigger get-event-subscription-status](#TriggersGetEventSubscriptionStatus)|GetEventSubscriptionStatus|[Parameters](#ParametersTriggersGetEventSubscriptionStatus)|[Example](#ExamplesTriggersGetEventSubscriptionStatus)| +|[az datafactory trigger query-by-factory](#TriggersQueryByFactory)|QueryByFactory|[Parameters](#ParametersTriggersQueryByFactory)|[Example](#ExamplesTriggersQueryByFactory)| +|[az datafactory trigger start](#TriggersStart)|Start|[Parameters](#ParametersTriggersStart)|[Example](#ExamplesTriggersStart)| +|[az datafactory trigger stop](#TriggersStop)|Stop|[Parameters](#ParametersTriggersStop)|[Example](#ExamplesTriggersStop)| +|[az datafactory trigger subscribe-to-event](#TriggersSubscribeToEvents)|SubscribeToEvents|[Parameters](#ParametersTriggersSubscribeToEvents)|[Example](#ExamplesTriggersSubscribeToEvents)| +|[az datafactory trigger unsubscribe-from-event](#TriggersUnsubscribeFromEvents)|UnsubscribeFromEvents|[Parameters](#ParametersTriggersUnsubscribeFromEvents)|[Example](#ExamplesTriggersUnsubscribeFromEvents)| + +### Commands in `az datafactory trigger-run` group +|CLI Command|Operation Swagger 
name|Parameters|Examples| +|---------|------------|--------|-----------| +|[az datafactory trigger-run cancel](#TriggerRunsCancel)|Cancel|[Parameters](#ParametersTriggerRunsCancel)|[Example](#ExamplesTriggerRunsCancel)| +|[az datafactory trigger-run query-by-factory](#TriggerRunsQueryByFactory)|QueryByFactory|[Parameters](#ParametersTriggerRunsQueryByFactory)|[Example](#ExamplesTriggerRunsQueryByFactory)| +|[az datafactory trigger-run rerun](#TriggerRunsRerun)|Rerun|[Parameters](#ParametersTriggerRunsRerun)|[Example](#ExamplesTriggerRunsRerun)| + + +## COMMAND DETAILS + +### group `az datafactory activity-run` +#### Command `az datafactory activity-run query-by-pipeline-run` + +##### Example +``` +az datafactory activity-run query-by-pipeline-run --factory-name "exampleFactoryName" --last-updated-after \ +"2018-06-16T00:36:44.3345758Z" --last-updated-before "2018-06-16T00:49:48.3686473Z" --resource-group \ +"exampleResourceGroup" --run-id "2f7fdb90-5df1-4b8e-ac2f-064cfa58202b" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -26,107 +149,64 @@ query-by-pipeline-run a datafactory activity-run. |**--filters**|array|List of filters.|filters|filters| |**--order-by**|array|List of OrderBy option.|order_by|orderBy| -### datafactory dataset create - -create a datafactory dataset. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory dataset|Datasets| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|create|CreateOrUpdate#Create| +### group `az datafactory dataset` +#### Command `az datafactory dataset list` -#### Parameters +##### Example +``` +az datafactory dataset list --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| -|**--dataset-name**|string|The dataset name.|dataset_name|datasetName| -|**--properties**|object|Dataset properties.|properties|properties| -|**--if-match**|string|ETag of the dataset entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match|If-Match| -### datafactory dataset delete +#### Command `az datafactory dataset show` -delete a datafactory dataset. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory dataset|Datasets| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|delete|Delete| - -#### Parameters +##### Example +``` +az datafactory dataset show --name "exampleDataset" --factory-name "exampleFactoryName" --resource-group \ +"exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--dataset-name**|string|The dataset name.|dataset_name|datasetName| +|**--if-none-match**|string|ETag of the dataset entity. Should only be specified for get. 
If the ETag matches the existing entity tag, or if * was provided, then no content will be returned.|if_none_match|If-None-Match| -### datafactory dataset list - -list a datafactory dataset. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory dataset|Datasets| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|list|ListByFactory| - -#### Parameters -|Option|Type|Description|Path (SDK)|Swagger name| -|------|----|-----------|----------|------------| -|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| -|**--factory-name**|string|The factory name.|factory_name|factoryName| - -### datafactory dataset show - -show a datafactory dataset. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory dataset|Datasets| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|show|Get| - -#### Parameters +#### Command `az datafactory dataset create` + +##### Example +``` +az datafactory dataset create --properties "{\\"type\\":\\"AzureBlob\\",\\"linkedServiceName\\":{\\"type\\":\\"LinkedSe\ +rviceReference\\",\\"referenceName\\":\\"exampleLinkedService\\"},\\"parameters\\":{\\"MyFileName\\":{\\"type\\":\\"Str\ +ing\\"},\\"MyFolderPath\\":{\\"type\\":\\"String\\"}},\\"typeProperties\\":{\\"format\\":{\\"type\\":\\"TextFormat\\"},\ +\\"fileName\\":{\\"type\\":\\"Expression\\",\\"value\\":\\"@dataset().MyFileName\\"},\\"folderPath\\":{\\"type\\":\\"Ex\ +pression\\",\\"value\\":\\"@dataset().MyFolderPath\\"}}}" --name "exampleDataset" --factory-name "exampleFactoryName" \ +--resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--dataset-name**|string|The dataset name.|dataset_name|datasetName| -|**--if-none-match**|string|ETag of the dataset entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned.|if_none_match|If-None-Match| - -### datafactory dataset update - -update a datafactory dataset. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory dataset|Datasets| +|**--properties**|object|Dataset properties.|properties|properties| +|**--if-match**|string|ETag of the dataset entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match|If-Match| -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|update|CreateOrUpdate#Update| +#### Command `az datafactory dataset update` -#### Parameters +##### Example +``` +az datafactory dataset update --description "Example description" --linked-service-name "{\\"type\\":\\"LinkedServiceRe\ +ference\\",\\"referenceName\\":\\"exampleLinkedService\\"}" --parameters "{\\"MyFileName\\":{\\"type\\":\\"String\\"},\ +\\"MyFolderPath\\":{\\"type\\":\\"String\\"}}" --name "exampleDataset" --factory-name "exampleFactoryName" \ +--resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -141,43 +221,61 @@ update a datafactory dataset. 
|**--annotations**|array|List of tags that can be used for describing the Dataset.|annotations|annotations| |**--folder**|object|The folder that this Dataset is in. If not specified, Dataset will appear at the root level.|folder|folder| -### datafactory factory configure-factory-repo - -configure-factory-repo a datafactory factory. +#### Command `az datafactory dataset delete` -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory factory|Factories| +##### Example +``` +az datafactory dataset delete --name "exampleDataset" --factory-name "exampleFactoryName" --resource-group \ +"exampleResourceGroup" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| +|**--factory-name**|string|The factory name.|factory_name|factoryName| +|**--dataset-name**|string|The dataset name.|dataset_name|datasetName| -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|configure-factory-repo|ConfigureFactoryRepo| +### group `az datafactory factory` +#### Command `az datafactory factory list` -#### Parameters +##### Example +``` +az datafactory factory list --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| -|**--location**|string|The location identifier.|location|locationId| -|**--factory-resource-id**|string|The factory resource id.|factory_resource_id|factoryResourceId| -|**--factory-vsts-configuration**|object|Factory's VSTS repo information.|factory_vsts_configuration|FactoryVSTSConfiguration| -|**--factory-git-hub-configuration**|object|Factory's GitHub repo information.|factory_git_hub_configuration|FactoryGitHubConfiguration| +|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| -### datafactory factory create +#### Command `az datafactory factory list` -create a datafactory factory. +##### Example +``` +az datafactory factory list +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +#### Command `az datafactory factory show` -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory factory|Factories| +##### Example +``` +az datafactory factory show --name "exampleFactoryName" --resource-group "exampleResourceGroup" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| +|**--factory-name**|string|The factory name.|factory_name|factoryName| +|**--if-none-match**|string|ETag of the factory entity. Should only be specified for get. 
If the ETag matches the existing entity tag, or if * was provided, then no content will be returned.|if_none_match|If-None-Match| -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|create|CreateOrUpdate#Create| +#### Command `az datafactory factory create` -#### Parameters +##### Example +``` +az datafactory factory create --location "East US" --name "exampleFactoryName" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -188,42 +286,60 @@ create a datafactory factory. |**--factory-vsts-configuration**|object|Factory's VSTS repo information.|factory_vsts_configuration|FactoryVSTSConfiguration| |**--factory-git-hub-configuration**|object|Factory's GitHub repo information.|factory_git_hub_configuration|FactoryGitHubConfiguration| |**--global-parameters**|dictionary|List of parameters for factory.|global_parameters|globalParameters| +|**--public-network-access**|choice|Whether or not public network access is allowed for the data factory.|public_network_access|publicNetworkAccess| -### datafactory factory delete +#### Command `az datafactory factory update` -delete a datafactory factory. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory factory|Factories| +##### Example +``` +az datafactory factory update --name "exampleFactoryName" --tags exampleTag="exampleValue" --resource-group \ +"exampleResourceGroup" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| +|**--factory-name**|string|The factory name.|factory_name|factoryName| +|**--tags**|dictionary|The resource tags.|tags|tags| -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|delete|Delete| +#### Command `az datafactory factory delete` -#### Parameters +##### Example +``` +az datafactory factory delete --name "exampleFactoryName" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| -### datafactory factory get-data-plane-access - -get-data-plane-access a datafactory factory. 
+#### Command `az datafactory factory configure-factory-repo` -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory factory|Factories| +##### Example +``` +az datafactory factory configure-factory-repo --factory-resource-id "/subscriptions/12345678-1234-1234-1234-12345678abc\ +/resourceGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName" \ +--factory-vsts-configuration account-name="ADF" collaboration-branch="master" last-commit-id="" project-name="project" \ +repository-name="repo" root-folder="/" tenant-id="" --location "East US" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--location**|string|The location identifier.|location|locationId| +|**--factory-resource-id**|string|The factory resource id.|factory_resource_id|factoryResourceId| +|**--factory-vsts-configuration**|object|Factory's VSTS repo information.|factory_vsts_configuration|FactoryVSTSConfiguration| +|**--factory-git-hub-configuration**|object|Factory's GitHub repo information.|factory_git_hub_configuration|FactoryGitHubConfiguration| -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|get-data-plane-access|GetDataPlaneAccess| +#### Command `az datafactory factory get-data-plane-access` -#### Parameters +##### Example +``` +az datafactory factory get-data-plane-access --name "exampleFactoryName" --access-resource-path "" --expire-time \ +"2018-11-10T09:46:20.2659347Z" --permissions "r" --profile-name "DefaultProfile" --start-time \ +"2018-11-10T02:46:20.2659347Z" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -234,21 +350,14 @@ get-data-plane-access a datafactory factory. |**--start-time**|string|Start time for the token. If not specified the current time will be used.|start_time|startTime| |**--expire-time**|string|Expiration time for the token. Maximum duration for the token is eight hours and by default the token will expire in eight hours.|expire_time|expireTime| -### datafactory factory get-git-hub-access-token +#### Command `az datafactory factory get-git-hub-access-token` -get-git-hub-access-token a datafactory factory. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory factory|Factories| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|get-git-hub-access-token|GetGitHubAccessToken| - -#### Parameters +##### Example +``` +az datafactory factory get-git-hub-access-token --name "exampleFactoryName" --git-hub-access-code "some" \ +--git-hub-access-token-base-url "some" --git-hub-client-id "some" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -257,455 +366,266 @@ get-git-hub-access-token a datafactory factory. |**--git-hub-access-token-base-url**|string|GitHub access token base URL.|git_hub_access_token_base_url|gitHubAccessTokenBaseUrl| |**--git-hub-client-id**|string|GitHub application client ID.|git_hub_client_id|gitHubClientId| -### datafactory factory list - -list a datafactory factory. 
- -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory factory|Factories| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|list|ListByResourceGroup| -|list|List| +### group `az datafactory integration-runtime` +#### Command `az datafactory integration-runtime list` -#### Parameters -|Option|Type|Description|Path (SDK)|Swagger name| -|------|----|-----------|----------|------------| -|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| - -### datafactory factory show - -show a datafactory factory. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory factory|Factories| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|show|Get| - -#### Parameters +##### Example +``` +az datafactory integration-runtime list --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| -|**--if-none-match**|string|ETag of the factory entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned.|if_none_match|If-None-Match| - -### datafactory factory update -update a datafactory factory. +#### Command `az datafactory integration-runtime show` -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory factory|Factories| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|update|Update| - -#### Parameters -|Option|Type|Description|Path (SDK)|Swagger name| -|------|----|-----------|----------|------------| -|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| -|**--factory-name**|string|The factory name.|factory_name|factoryName| -|**--tags**|dictionary|The resource tags.|tags|tags| - -### datafactory integration-runtime delete - -delete a datafactory integration-runtime. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory integration-runtime|IntegrationRuntimes| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|delete|Delete| - -#### Parameters +##### Example +``` +az datafactory integration-runtime show --factory-name "exampleFactoryName" --name "exampleIntegrationRuntime" \ +--resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName| +|**--if-none-match**|string|ETag of the integration runtime entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned.|if_none_match|If-None-Match| -### datafactory integration-runtime get-connection-info - -get-connection-info a datafactory integration-runtime. 
- -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory integration-runtime|IntegrationRuntimes| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|get-connection-info|GetConnectionInfo| - -#### Parameters +#### Command `az datafactory integration-runtime linked-integration-runtime create` + +##### Example +``` +az datafactory integration-runtime linked-integration-runtime create --name "bfa92911-9fb6-4fbe-8f23-beae87bc1c83" \ +--location "West US" --data-factory-name "e9955d6d-56ea-4be3-841c-52a12c1a9981" --subscription-id \ +"061774c7-4b5a-4159-a55b-365581830283" --factory-name "exampleFactoryName" --integration-runtime-name \ +"exampleIntegrationRuntime" --resource-group "exampleResourceGroup" --subscription-id "12345678-1234-1234-1234-12345678\ +abc" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName| +|**--name**|string|The name of the linked integration runtime.|name|name| +|**--subscription-id**|string|The ID of the subscription that the linked integration runtime belongs to.|subscription_id|subscriptionId| +|**--data-factory-name**|string|The name of the data factory that the linked integration runtime belongs to.|data_factory_name|dataFactoryName| +|**--location**|string|The location of the data factory that the linked integration runtime belongs to.|location|dataFactoryLocation| -### datafactory integration-runtime get-monitoring-data - -get-monitoring-data a datafactory integration-runtime. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory integration-runtime|IntegrationRuntimes| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|get-monitoring-data|GetMonitoringData| +#### Command `az datafactory integration-runtime managed create` -#### Parameters +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName| +|**--if-match**|string|ETag of the integration runtime entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match|If-Match| +|**--description**|string|Integration runtime description.|managed_description|description| +|**--type-properties-compute-properties**|object|The compute resource for managed integration runtime.|managed_compute_properties|computeProperties| +|**--type-properties-ssis-properties**|object|SSIS properties for managed integration runtime.|managed_ssis_properties|ssisProperties| -### datafactory integration-runtime get-status - -get-status a datafactory integration-runtime. 
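+
+##### Example
+A minimal illustrative call only (the swagger examples do not cover this command): the compute-properties payload below is a hypothetical value using the IntegrationRuntimeComputeProperties field names, and `--name` is assumed to alias the integration runtime name as in the other integration-runtime examples.
+```
+az datafactory integration-runtime managed create --factory-name "exampleFactoryName" --name \
+"exampleManagedIntegrationRuntime" --resource-group "exampleResourceGroup" --description "A managed integration \
+runtime" --type-properties-compute-properties "{\\"location\\":\\"East US\\",\\"nodeSize\\":\\"Standard_D2_v3\\",\
+\\"numberOfNodes\\":1,\\"maxParallelExecutionsPerNode\\":1}"
+```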
- -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory integration-runtime|IntegrationRuntimes| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|get-status|GetStatus| +#### Command `az datafactory integration-runtime self-hosted create` -#### Parameters +##### Example +``` +az datafactory integration-runtime self-hosted create --factory-name "exampleFactoryName" --description "A selfhosted \ +integration runtime" --name "exampleIntegrationRuntime" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName| +|**--if-match**|string|ETag of the integration runtime entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match|If-Match| +|**--description**|string|Integration runtime description.|self_hosted_description|description| +|**--type-properties-linked-info**|object|The base definition of a linked integration runtime.|self_hosted_linked_info|linkedInfo| -### datafactory integration-runtime linked-integration-runtime create - -linked-integration-runtime create a datafactory integration-runtime. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory integration-runtime|IntegrationRuntimes| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|linked-integration-runtime create|CreateLinkedIntegrationRuntime| +#### Command `az datafactory integration-runtime update` -#### Parameters +##### Example +``` +az datafactory integration-runtime update --factory-name "exampleFactoryName" --name "exampleIntegrationRuntime" \ +--resource-group "exampleResourceGroup" --auto-update "Off" --update-delay-offset "\\"PT3H\\"" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName| -|**--name**|string|The name of the linked integration runtime.|name|name| -|**--subscription-id**|string|The ID of the subscription that the linked integration runtime belongs to.|subscription_id|subscriptionId| -|**--data-factory-name**|string|The name of the data factory that the linked integration runtime belongs to.|data_factory_name|dataFactoryName| -|**--location**|string|The location of the data factory that the linked integration runtime belongs to.|location|dataFactoryLocation| - -### datafactory integration-runtime list - -list a datafactory integration-runtime. 
- -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory integration-runtime|IntegrationRuntimes| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|list|ListByFactory| - -#### Parameters -|Option|Type|Description|Path (SDK)|Swagger name| -|------|----|-----------|----------|------------| -|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| -|**--factory-name**|string|The factory name.|factory_name|factoryName| - -### datafactory integration-runtime list-auth-key - -list-auth-key a datafactory integration-runtime. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory integration-runtime|IntegrationRuntimes| +|**--auto-update**|choice|Enables or disables the auto-update feature of the self-hosted integration runtime. See https://go.microsoft.com/fwlink/?linkid=854189.|auto_update|autoUpdate| +|**--update-delay-offset**|string|The time offset (in hours) in the day, e.g., PT03H is 3 hours. The integration runtime auto update will happen on that time.|update_delay_offset|updateDelayOffset| -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|list-auth-key|ListAuthKeys| +#### Command `az datafactory integration-runtime delete` -#### Parameters +##### Example +``` +az datafactory integration-runtime delete --factory-name "exampleFactoryName" --name "exampleIntegrationRuntime" \ +--resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName| -### datafactory integration-runtime managed create - -managed create a datafactory integration-runtime. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory integration-runtime|IntegrationRuntimes| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|managed create|CreateOrUpdate#Create#Managed| +#### Command `az datafactory integration-runtime get-connection-info` -#### Parameters +##### Example +``` +az datafactory integration-runtime get-connection-info --factory-name "exampleFactoryName" --name \ +"exampleIntegrationRuntime" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName| -|**--if-match**|string|ETag of the integration runtime entity. 
Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match|If-Match| -|**--description**|string|Integration runtime description.|managed_description|description| -|**--type-properties-compute-properties**|object|The compute resource for managed integration runtime.|managed_compute_properties|computeProperties| -|**--type-properties-ssis-properties**|object|SSIS properties for managed integration runtime.|managed_ssis_properties|ssisProperties| - -### datafactory integration-runtime regenerate-auth-key -regenerate-auth-key a datafactory integration-runtime. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory integration-runtime|IntegrationRuntimes| +#### Command `az datafactory integration-runtime get-monitoring-data` -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|regenerate-auth-key|RegenerateAuthKey| - -#### Parameters +##### Example +``` +az datafactory integration-runtime get-monitoring-data --factory-name "exampleFactoryName" --name \ +"exampleIntegrationRuntime" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName| -|**--key-name**|choice|The name of the authentication key to regenerate.|key_name|keyName| - -### datafactory integration-runtime remove-link - -remove-link a datafactory integration-runtime. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory integration-runtime|IntegrationRuntimes| -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|remove-link|RemoveLinks| +#### Command `az datafactory integration-runtime get-status` -#### Parameters +##### Example +``` +az datafactory integration-runtime get-status --factory-name "exampleFactoryName" --name "exampleIntegrationRuntime" \ +--resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName| -|**--linked-factory-name**|string|The data factory name for linked integration runtime.|linked_factory_name|linkedFactoryName| - -### datafactory integration-runtime self-hosted create - -self-hosted create a datafactory integration-runtime. 
-#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory integration-runtime|IntegrationRuntimes| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|self-hosted create|CreateOrUpdate#Create#SelfHosted| +#### Command `az datafactory integration-runtime list-auth-key` -#### Parameters +##### Example +``` +az datafactory integration-runtime list-auth-key --factory-name "exampleFactoryName" --name \ +"exampleIntegrationRuntime" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName| -|**--if-match**|string|ETag of the integration runtime entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match|If-Match| -|**--description**|string|Integration runtime description.|self_hosted_description|description| -|**--type-properties-linked-info**|object|The base definition of a linked integration runtime.|self_hosted_linked_info|linkedInfo| - -### datafactory integration-runtime show - -show a datafactory integration-runtime. -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory integration-runtime|IntegrationRuntimes| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|show|Get| +#### Command `az datafactory integration-runtime regenerate-auth-key` -#### Parameters +##### Example +``` +az datafactory integration-runtime regenerate-auth-key --factory-name "exampleFactoryName" --name \ +"exampleIntegrationRuntime" --key-name "authKey2" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName| -|**--if-none-match**|string|ETag of the integration runtime entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned.|if_none_match|If-None-Match| - -### datafactory integration-runtime start - -start a datafactory integration-runtime. 
- -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory integration-runtime|IntegrationRuntimes| +|**--key-name**|choice|The name of the authentication key to regenerate.|key_name|keyName| -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|start|Start| +#### Command `az datafactory integration-runtime remove-link` -#### Parameters +##### Example +``` +az datafactory integration-runtime remove-link --factory-name "exampleFactoryName" --name "exampleIntegrationRuntime" \ +--linked-factory-name "exampleFactoryName-linked" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName| +|**--linked-factory-name**|string|The data factory name for linked integration runtime.|linked_factory_name|linkedFactoryName| -### datafactory integration-runtime stop - -stop a datafactory integration-runtime. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory integration-runtime|IntegrationRuntimes| +#### Command `az datafactory integration-runtime start` -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|stop|Stop| - -#### Parameters +##### Example +``` +az datafactory integration-runtime start --factory-name "exampleFactoryName" --name "exampleManagedIntegrationRuntime" \ +--resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName| -### datafactory integration-runtime sync-credentials - -sync-credentials a datafactory integration-runtime. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory integration-runtime|IntegrationRuntimes| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|sync-credentials|SyncCredentials| +#### Command `az datafactory integration-runtime stop` -#### Parameters +##### Example +``` +az datafactory integration-runtime stop --factory-name "exampleFactoryName" --name "exampleManagedIntegrationRuntime" \ +--resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName| -### datafactory integration-runtime update - -update a datafactory integration-runtime. 
- -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory integration-runtime|IntegrationRuntimes| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|update|Update| +#### Command `az datafactory integration-runtime sync-credentials` -#### Parameters +##### Example +``` +az datafactory integration-runtime sync-credentials --factory-name "exampleFactoryName" --name \ +"exampleIntegrationRuntime" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName| -|**--auto-update**|choice|Enables or disables the auto-update feature of the self-hosted integration runtime. See https://go.microsoft.com/fwlink/?linkid=854189.|auto_update|autoUpdate| -|**--update-delay-offset**|string|The time offset (in hours) in the day, e.g., PT03H is 3 hours. The integration runtime auto update will happen on that time.|update_delay_offset|updateDelayOffset| - -### datafactory integration-runtime upgrade - -upgrade a datafactory integration-runtime. -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory integration-runtime|IntegrationRuntimes| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|upgrade|Upgrade| +#### Command `az datafactory integration-runtime upgrade` -#### Parameters +##### Example +``` +az datafactory integration-runtime upgrade --factory-name "exampleFactoryName" --name "exampleIntegrationRuntime" \ +--resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName| -### datafactory integration-runtime-node delete +### group `az datafactory integration-runtime-node` +#### Command `az datafactory integration-runtime-node show` -delete a datafactory integration-runtime-node. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory integration-runtime-node|IntegrationRuntimeNodes| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|delete|Delete| - -#### Parameters +##### Example +``` +az datafactory integration-runtime-node show --factory-name "exampleFactoryName" --integration-runtime-name \ +"exampleIntegrationRuntime" --node-name "Node_1" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -713,43 +633,30 @@ delete a datafactory integration-runtime-node. |**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName| |**--node-name**|string|The integration runtime node name.|node_name|nodeName| -### datafactory integration-runtime-node get-ip-address - -get-ip-address a datafactory integration-runtime-node. 
- -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory integration-runtime-node|IntegrationRuntimeNodes| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|get-ip-address|GetIpAddress| +#### Command `az datafactory integration-runtime-node update` -#### Parameters +##### Example +``` +az datafactory integration-runtime-node update --factory-name "exampleFactoryName" --integration-runtime-name \ +"exampleIntegrationRuntime" --node-name "Node_1" --resource-group "exampleResourceGroup" --concurrent-jobs-limit 2 +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName| |**--node-name**|string|The integration runtime node name.|node_name|nodeName| +|**--concurrent-jobs-limit**|integer|The number of concurrent jobs permitted to run on the integration runtime node. Values between 1 and maxConcurrentJobs(inclusive) are allowed.|concurrent_jobs_limit|concurrentJobsLimit| -### datafactory integration-runtime-node show - -show a datafactory integration-runtime-node. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory integration-runtime-node|IntegrationRuntimeNodes| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|show|Get| +#### Command `az datafactory integration-runtime-node delete` -#### Parameters +##### Example +``` +az datafactory integration-runtime-node delete --factory-name "exampleFactoryName" --integration-runtime-name \ +"exampleIntegrationRuntime" --node-name "Node_1" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -757,130 +664,75 @@ show a datafactory integration-runtime-node. |**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName| |**--node-name**|string|The integration runtime node name.|node_name|nodeName| -### datafactory integration-runtime-node update +#### Command `az datafactory integration-runtime-node get-ip-address` -update a datafactory integration-runtime-node. 
- -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory integration-runtime-node|IntegrationRuntimeNodes| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|update|Update| - -#### Parameters +##### Example +``` +az datafactory integration-runtime-node get-ip-address --factory-name "exampleFactoryName" --integration-runtime-name \ +"exampleIntegrationRuntime" --node-name "Node_1" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName| |**--node-name**|string|The integration runtime node name.|node_name|nodeName| -|**--concurrent-jobs-limit**|integer|The number of concurrent jobs permitted to run on the integration runtime node. Values between 1 and maxConcurrentJobs(inclusive) are allowed.|concurrent_jobs_limit|concurrentJobsLimit| -### datafactory linked-service create +### group `az datafactory linked-service` +#### Command `az datafactory linked-service list` -create a datafactory linked-service. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory linked-service|LinkedServices| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|create|CreateOrUpdate#Create| - -#### Parameters +##### Example +``` +az datafactory linked-service list --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| -|**--linked-service-name**|string|The linked service name.|linked_service_name|linkedServiceName| -|**--properties**|object|Properties of linked service.|properties|properties| -|**--if-match**|string|ETag of the linkedService entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match|If-Match| - -### datafactory linked-service delete -delete a datafactory linked-service. +#### Command `az datafactory linked-service show` -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory linked-service|LinkedServices| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|delete|Delete| - -#### Parameters +##### Example +``` +az datafactory linked-service show --factory-name "exampleFactoryName" --name "exampleLinkedService" --resource-group \ +"exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--linked-service-name**|string|The linked service name.|linked_service_name|linkedServiceName| +|**--if-none-match**|string|ETag of the linked service entity. Should only be specified for get. 
If the ETag matches the existing entity tag, or if * was provided, then no content will be returned.|if_none_match|If-None-Match| -### datafactory linked-service list - -list a datafactory linked-service. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory linked-service|LinkedServices| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|list|ListByFactory| - -#### Parameters -|Option|Type|Description|Path (SDK)|Swagger name| -|------|----|-----------|----------|------------| -|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| -|**--factory-name**|string|The factory name.|factory_name|factoryName| - -### datafactory linked-service show - -show a datafactory linked-service. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory linked-service|LinkedServices| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|show|Get| +#### Command `az datafactory linked-service create` -#### Parameters +##### Example +``` +az datafactory linked-service create --factory-name "exampleFactoryName" --properties "{\\"type\\":\\"AzureStorage\\",\ +\\"typeProperties\\":{\\"connectionString\\":{\\"type\\":\\"SecureString\\",\\"value\\":\\"DefaultEndpointsProtocol=htt\ +ps;AccountName=examplestorageaccount;AccountKey=\\"}}}" --name "exampleLinkedService" --resource-group \ +"exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--linked-service-name**|string|The linked service name.|linked_service_name|linkedServiceName| -|**--if-none-match**|string|ETag of the linked service entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned.|if_none_match|If-None-Match| - -### datafactory linked-service update - -update a datafactory linked-service. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory linked-service|LinkedServices| +|**--properties**|object|Properties of linked service.|properties|properties| +|**--if-match**|string|ETag of the linkedService entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match|If-Match| -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|update|CreateOrUpdate#Update| +#### Command `az datafactory linked-service update` -#### Parameters +##### Example +``` +az datafactory linked-service update --factory-name "exampleFactoryName" --description "Example description" --name \ +"exampleLinkedService" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -892,133 +744,89 @@ update a datafactory linked-service. |**--parameters**|dictionary|Parameters for linked service.|parameters|parameters| |**--annotations**|array|List of tags that can be used for describing the linked service.|annotations|annotations| -### datafactory pipeline create - -create a datafactory pipeline. 
+#### Command `az datafactory linked-service delete` -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory pipeline|Pipelines| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|create|CreateOrUpdate#Create| - -#### Parameters +##### Example +``` +az datafactory linked-service delete --factory-name "exampleFactoryName" --name "exampleLinkedService" \ +--resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| -|**--pipeline-name**|string|The pipeline name.|pipeline_name|pipelineName| -|**--pipeline**|object|Pipeline resource definition.|pipeline|pipeline| -|**--if-match**|string|ETag of the pipeline entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match|If-Match| - -### datafactory pipeline create-run - -create-run a datafactory pipeline. +|**--linked-service-name**|string|The linked service name.|linked_service_name|linkedServiceName| -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory pipeline|Pipelines| +### group `az datafactory pipeline` +#### Command `az datafactory pipeline list` -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|create-run|CreateRun| - -#### Parameters +##### Example +``` +az datafactory pipeline list --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| -|**--pipeline-name**|string|The pipeline name.|pipeline_name|pipelineName| -|**--reference-pipeline-run-id**|string|The pipeline run identifier. If run ID is specified the parameters of the specified run will be used to create a new run.|reference_pipeline_run_id|referencePipelineRunId| -|**--is-recovery**|boolean|Recovery mode flag. If recovery mode is set to true, the specified referenced pipeline run and the new run will be grouped under the same groupId.|is_recovery|isRecovery| -|**--start-activity-name**|string|In recovery mode, the rerun will start from this activity. If not specified, all activities will run.|start_activity_name|startActivityName| -|**--start-from-failure**|boolean|In recovery mode, if set to true, the rerun will start from failed activities. The property will be used only if startActivityName is not specified.|start_from_failure|startFromFailure| -|**--parameters**|dictionary|Parameters of the pipeline run. These parameters will be used only if the runId is not specified.|parameters|parameters| -### datafactory pipeline delete +#### Command `az datafactory pipeline show` -delete a datafactory pipeline. 
- -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory pipeline|Pipelines| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|delete|Delete| - -#### Parameters +##### Example +``` +az datafactory pipeline show --factory-name "exampleFactoryName" --name "examplePipeline" --resource-group \ +"exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--pipeline-name**|string|The pipeline name.|pipeline_name|pipelineName| +|**--if-none-match**|string|ETag of the pipeline entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned.|if_none_match|If-None-Match| -### datafactory pipeline list - -list a datafactory pipeline. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory pipeline|Pipelines| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|list|ListByFactory| - -#### Parameters -|Option|Type|Description|Path (SDK)|Swagger name| -|------|----|-----------|----------|------------| -|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| -|**--factory-name**|string|The factory name.|factory_name|factoryName| - -### datafactory pipeline show - -show a datafactory pipeline. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory pipeline|Pipelines| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|show|Get| - -#### Parameters +#### Command `az datafactory pipeline create` + +##### Example +``` +az datafactory pipeline create --factory-name "exampleFactoryName" --pipeline "{\\"activities\\":[{\\"name\\":\\"Exampl\ +eForeachActivity\\",\\"type\\":\\"ForEach\\",\\"typeProperties\\":{\\"activities\\":[{\\"name\\":\\"ExampleCopyActivity\ +\\",\\"type\\":\\"Copy\\",\\"inputs\\":[{\\"type\\":\\"DatasetReference\\",\\"parameters\\":{\\"MyFileName\\":\\"exampl\ +econtainer.csv\\",\\"MyFolderPath\\":\\"examplecontainer\\"},\\"referenceName\\":\\"exampleDataset\\"}],\\"outputs\\":[\ +{\\"type\\":\\"DatasetReference\\",\\"parameters\\":{\\"MyFileName\\":{\\"type\\":\\"Expression\\",\\"value\\":\\"@item\ +()\\"},\\"MyFolderPath\\":\\"examplecontainer\\"},\\"referenceName\\":\\"exampleDataset\\"}],\\"typeProperties\\":{\\"d\ +ataIntegrationUnits\\":32,\\"sink\\":{\\"type\\":\\"BlobSink\\"},\\"source\\":{\\"type\\":\\"BlobSource\\"}}}],\\"isSeq\ +uential\\":true,\\"items\\":{\\"type\\":\\"Expression\\",\\"value\\":\\"@pipeline().parameters.OutputBlobNameList\\"}}}\ +],\\"parameters\\":{\\"JobId\\":{\\"type\\":\\"String\\"},\\"OutputBlobNameList\\":{\\"type\\":\\"Array\\"}},\\"variabl\ +es\\":{\\"TestVariableArray\\":{\\"type\\":\\"Array\\"}},\\"runDimensions\\":{\\"JobId\\":{\\"type\\":\\"Expression\\",\ +\\"value\\":\\"@pipeline().parameters.JobId\\"}}}" --name "examplePipeline" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--pipeline-name**|string|The pipeline name.|pipeline_name|pipelineName| 
-|**--if-none-match**|string|ETag of the pipeline entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned.|if_none_match|If-None-Match| - -### datafactory pipeline update - -update a datafactory pipeline. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory pipeline|Pipelines| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|update|CreateOrUpdate#Update| +|**--pipeline**|object|Pipeline resource definition.|pipeline|pipeline| +|**--if-match**|string|ETag of the pipeline entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match|If-Match| -#### Parameters +#### Command `az datafactory pipeline update` + +##### Example +``` +az datafactory pipeline update --factory-name "exampleFactoryName" --description "Example description" --activities \ +"[{\\"name\\":\\"ExampleForeachActivity\\",\\"type\\":\\"ForEach\\",\\"typeProperties\\":{\\"activities\\":[{\\"name\\"\ +:\\"ExampleCopyActivity\\",\\"type\\":\\"Copy\\",\\"inputs\\":[{\\"type\\":\\"DatasetReference\\",\\"parameters\\":{\\"\ +MyFileName\\":\\"examplecontainer.csv\\",\\"MyFolderPath\\":\\"examplecontainer\\"},\\"referenceName\\":\\"exampleDatas\ +et\\"}],\\"outputs\\":[{\\"type\\":\\"DatasetReference\\",\\"parameters\\":{\\"MyFileName\\":{\\"type\\":\\"Expression\ +\\",\\"value\\":\\"@item()\\"},\\"MyFolderPath\\":\\"examplecontainer\\"},\\"referenceName\\":\\"exampleDataset\\"}],\\\ +"typeProperties\\":{\\"dataIntegrationUnits\\":32,\\"sink\\":{\\"type\\":\\"BlobSink\\"},\\"source\\":{\\"type\\":\\"Bl\ +obSource\\"}}}],\\"isSequential\\":true,\\"items\\":{\\"type\\":\\"Expression\\",\\"value\\":\\"@pipeline().parameters.\ +OutputBlobNameList\\"}}}]" --parameters "{\\"OutputBlobNameList\\":{\\"type\\":\\"Array\\"}}" --name "examplePipeline" \ +--resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -1034,43 +842,78 @@ update a datafactory pipeline. |**--run-dimensions**|dictionary|Dimensions emitted by Pipeline.|run_dimensions|runDimensions| |**--folder-name**|string|The name of the folder that this Pipeline is in.|name|name| -### datafactory pipeline-run cancel +#### Command `az datafactory pipeline delete` -cancel a datafactory pipeline-run. 
+##### Example +``` +az datafactory pipeline delete --factory-name "exampleFactoryName" --name "examplePipeline" --resource-group \ +"exampleResourceGroup" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| +|**--factory-name**|string|The factory name.|factory_name|factoryName| +|**--pipeline-name**|string|The pipeline name.|pipeline_name|pipelineName| -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory pipeline-run|PipelineRuns| +#### Command `az datafactory pipeline create-run` -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|cancel|Cancel| +##### Example +``` +az datafactory pipeline create-run --factory-name "exampleFactoryName" --parameters "{\\"OutputBlobNameList\\":[\\"exam\ +pleoutput.csv\\"]}" --name "examplePipeline" --resource-group "exampleResourceGroup" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| +|**--factory-name**|string|The factory name.|factory_name|factoryName| +|**--pipeline-name**|string|The pipeline name.|pipeline_name|pipelineName| +|**--reference-pipeline-run-id**|string|The pipeline run identifier. If run ID is specified the parameters of the specified run will be used to create a new run.|reference_pipeline_run_id|referencePipelineRunId| +|**--is-recovery**|boolean|Recovery mode flag. If recovery mode is set to true, the specified referenced pipeline run and the new run will be grouped under the same groupId.|is_recovery|isRecovery| +|**--start-activity-name**|string|In recovery mode, the rerun will start from this activity. If not specified, all activities will run.|start_activity_name|startActivityName| +|**--start-from-failure**|boolean|In recovery mode, if set to true, the rerun will start from failed activities. The property will be used only if startActivityName is not specified.|start_from_failure|startFromFailure| +|**--parameters**|dictionary|Parameters of the pipeline run. These parameters will be used only if the runId is not specified.|parameters|parameters| -#### Parameters +### group `az datafactory pipeline-run` +#### Command `az datafactory pipeline-run show` + +##### Example +``` +az datafactory pipeline-run show --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" --run-id \ +"2f7fdb90-5df1-4b8e-ac2f-064cfa58202b" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--run-id**|string|The pipeline run identifier.|run_id|runId| -|**--is-recursive**|boolean|If true, cancel all the Child pipelines that are triggered by the current pipeline.|is_recursive|isRecursive| -### datafactory pipeline-run query-by-factory +#### Command `az datafactory pipeline-run cancel` -query-by-factory a datafactory pipeline-run. 
- -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory pipeline-run|PipelineRuns| +##### Example +``` +az datafactory pipeline-run cancel --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" \ +--run-id "16ac5348-ff82-4f95-a80d-638c1d47b721" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| +|**--factory-name**|string|The factory name.|factory_name|factoryName| +|**--run-id**|string|The pipeline run identifier.|run_id|runId| +|**--is-recursive**|boolean|If true, cancel all the Child pipelines that are triggered by the current pipeline.|is_recursive|isRecursive| -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|query-by-factory|QueryByFactory| +#### Command `az datafactory pipeline-run query-by-factory` -#### Parameters +##### Example +``` +az datafactory pipeline-run query-by-factory --factory-name "exampleFactoryName" --filters operand="PipelineName" \ +operator="Equals" values="examplePipeline" --last-updated-after "2018-06-16T00:36:44.3345758Z" --last-updated-before \ +"2018-06-16T00:49:48.3686473Z" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -1081,279 +924,178 @@ query-by-factory a datafactory pipeline-run. |**--filters**|array|List of filters.|filters|filters| |**--order-by**|array|List of OrderBy option.|order_by|orderBy| -### datafactory pipeline-run show +### group `az datafactory trigger` +#### Command `az datafactory trigger list` -show a datafactory pipeline-run. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory pipeline-run|PipelineRuns| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|show|Get| - -#### Parameters +##### Example +``` +az datafactory trigger list --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| -|**--run-id**|string|The pipeline run identifier.|run_id|runId| - -### datafactory trigger create - -create a datafactory trigger. -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory trigger|Triggers| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|create|CreateOrUpdate#Create| +#### Command `az datafactory trigger show` -#### Parameters +##### Example +``` +az datafactory trigger show --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" --name \ +"exampleTrigger" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--trigger-name**|string|The trigger name.|trigger_name|triggerName| -|**--properties**|object|Properties of the trigger.|properties|properties| -|**--if-match**|string|ETag of the trigger entity. 
Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match|If-Match| - -### datafactory trigger delete - -delete a datafactory trigger. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory trigger|Triggers| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|delete|Delete| +|**--if-none-match**|string|ETag of the trigger entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned.|if_none_match|If-None-Match| -#### Parameters +#### Command `az datafactory trigger create` + +##### Example +``` +az datafactory trigger create --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" --properties \ +"{\\"type\\":\\"ScheduleTrigger\\",\\"pipelines\\":[{\\"parameters\\":{\\"OutputBlobNameList\\":[\\"exampleoutput.csv\\\ +"]},\\"pipelineReference\\":{\\"type\\":\\"PipelineReference\\",\\"referenceName\\":\\"examplePipeline\\"}}],\\"typePro\ +perties\\":{\\"recurrence\\":{\\"endTime\\":\\"2018-06-16T00:55:13.8441801Z\\",\\"frequency\\":\\"Minute\\",\\"interval\ +\\":4,\\"startTime\\":\\"2018-06-16T00:39:13.8441801Z\\",\\"timeZone\\":\\"UTC\\"}}}" --name "exampleTrigger" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--trigger-name**|string|The trigger name.|trigger_name|triggerName| +|**--properties**|object|Properties of the trigger.|properties|properties| +|**--if-match**|string|ETag of the trigger entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match|If-Match| -### datafactory trigger get-event-subscription-status - -get-event-subscription-status a datafactory trigger. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory trigger|Triggers| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|get-event-subscription-status|GetEventSubscriptionStatus| +#### Command `az datafactory trigger update` -#### Parameters +##### Example +``` +az datafactory trigger update --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" \ +--description "Example description" --name "exampleTrigger" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--trigger-name**|string|The trigger name.|trigger_name|triggerName| +|**--if-match**|string|ETag of the trigger entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match|If-Match| +|**--description**|string|Trigger description.|description|description| +|**--annotations**|array|List of tags that can be used for describing the trigger.|annotations|annotations| -### datafactory trigger list - -list a datafactory trigger. 
- -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory trigger|Triggers| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|list|ListByFactory| - -#### Parameters -|Option|Type|Description|Path (SDK)|Swagger name| -|------|----|-----------|----------|------------| -|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| -|**--factory-name**|string|The factory name.|factory_name|factoryName| - -### datafactory trigger query-by-factory - -query-by-factory a datafactory trigger. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory trigger|Triggers| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|query-by-factory|QueryByFactory| +#### Command `az datafactory trigger delete` -#### Parameters +##### Example +``` +az datafactory trigger delete --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" --name \ +"exampleTrigger" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| -|**--continuation-token**|string|The continuation token for getting the next page of results. Null for first page.|continuation_token|continuationToken| -|**--parent-trigger-name**|string|The name of the parent TumblingWindowTrigger to get the child rerun triggers|parent_trigger_name|parentTriggerName| - -### datafactory trigger show - -show a datafactory trigger. +|**--trigger-name**|string|The trigger name.|trigger_name|triggerName| -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory trigger|Triggers| +#### Command `az datafactory trigger get-event-subscription-status` -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|show|Get| - -#### Parameters +##### Example +``` +az datafactory trigger get-event-subscription-status --factory-name "exampleFactoryName" --resource-group \ +"exampleResourceGroup" --name "exampleTrigger" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--trigger-name**|string|The trigger name.|trigger_name|triggerName| -|**--if-none-match**|string|ETag of the trigger entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned.|if_none_match|If-None-Match| - -### datafactory trigger start -start a datafactory trigger. 
+#### Command `az datafactory trigger query-by-factory` -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory trigger|Triggers| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|start|Start| - -#### Parameters +##### Example +``` +az datafactory trigger query-by-factory --factory-name "exampleFactoryName" --parent-trigger-name "exampleTrigger" \ +--resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| -|**--trigger-name**|string|The trigger name.|trigger_name|triggerName| - -### datafactory trigger stop - -stop a datafactory trigger. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory trigger|Triggers| +|**--continuation-token**|string|The continuation token for getting the next page of results. Null for first page.|continuation_token|continuationToken| +|**--parent-trigger-name**|string|The name of the parent TumblingWindowTrigger to get the child rerun triggers|parent_trigger_name|parentTriggerName| -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|stop|Stop| +#### Command `az datafactory trigger start` -#### Parameters +##### Example +``` +az datafactory trigger start --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" --name \ +"exampleTrigger" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--trigger-name**|string|The trigger name.|trigger_name|triggerName| -### datafactory trigger subscribe-to-event - -subscribe-to-event a datafactory trigger. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory trigger|Triggers| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|subscribe-to-event|SubscribeToEvents| +#### Command `az datafactory trigger stop` -#### Parameters +##### Example +``` +az datafactory trigger stop --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" --name \ +"exampleTrigger" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--trigger-name**|string|The trigger name.|trigger_name|triggerName| -### datafactory trigger unsubscribe-from-event - -unsubscribe-from-event a datafactory trigger. 
- -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory trigger|Triggers| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|unsubscribe-from-event|UnsubscribeFromEvents| +#### Command `az datafactory trigger subscribe-to-event` -#### Parameters +##### Example +``` +az datafactory trigger subscribe-to-event --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" \ +--name "exampleTrigger" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--trigger-name**|string|The trigger name.|trigger_name|triggerName| -### datafactory trigger update - -update a datafactory trigger. +#### Command `az datafactory trigger unsubscribe-from-event` -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory trigger|Triggers| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|update|CreateOrUpdate#Update| - -#### Parameters +##### Example +``` +az datafactory trigger unsubscribe-from-event --factory-name "exampleFactoryName" --resource-group \ +"exampleResourceGroup" --name "exampleTrigger" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--trigger-name**|string|The trigger name.|trigger_name|triggerName| -|**--if-match**|string|ETag of the trigger entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match|If-Match| -|**--description**|string|Trigger description.|description|description| -|**--annotations**|array|List of tags that can be used for describing the trigger.|annotations|annotations| -### datafactory trigger-run cancel +### group `az datafactory trigger-run` +#### Command `az datafactory trigger-run cancel` -cancel a datafactory trigger-run. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory trigger-run|TriggerRuns| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|cancel|Cancel| - -#### Parameters +##### Example +``` +az datafactory trigger-run cancel --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" --run-id \ +"2f7fdb90-5df1-4b8e-ac2f-064cfa58202b" --trigger-name "exampleTrigger" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -1361,21 +1103,15 @@ cancel a datafactory trigger-run. |**--trigger-name**|string|The trigger name.|trigger_name|triggerName| |**--run-id**|string|The pipeline run identifier.|run_id|runId| -### datafactory trigger-run query-by-factory - -query-by-factory a datafactory trigger-run. 
- -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory trigger-run|TriggerRuns| +#### Command `az datafactory trigger-run query-by-factory` -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|query-by-factory|QueryByFactory| - -#### Parameters +##### Example +``` +az datafactory trigger-run query-by-factory --factory-name "exampleFactoryName" --filters operand="TriggerName" \ +operator="Equals" values="exampleTrigger" --last-updated-after "2018-06-16T00:36:44.3345758Z" --last-updated-before \ +"2018-06-16T00:49:48.3686473Z" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -1386,21 +1122,14 @@ query-by-factory a datafactory trigger-run. |**--filters**|array|List of filters.|filters|filters| |**--order-by**|array|List of OrderBy option.|order_by|orderBy| -### datafactory trigger-run rerun - -rerun a datafactory trigger-run. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory trigger-run|TriggerRuns| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|rerun|Rerun| +#### Command `az datafactory trigger-run rerun` -#### Parameters +##### Example +``` +az datafactory trigger-run rerun --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" --run-id \ +"2f7fdb90-5df1-4b8e-ac2f-064cfa58202b" --trigger-name "exampleTrigger" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| diff --git a/src/datafactory/setup.py b/src/datafactory/setup.py index 116e5e2b8cc..a26a3db9b84 100644 --- a/src/datafactory/setup.py +++ b/src/datafactory/setup.py @@ -10,7 +10,7 @@ from setuptools import setup, find_packages # HISTORY.rst entry. -VERSION = '0.2.0' +VERSION = '0.1.0' try: from azext_datafactory.manual.version import VERSION except ImportError: @@ -33,7 +33,7 @@ DEPENDENCIES = [] try: - from .manual.dependency import DEPENDENCIES + from azext_datafactory.manual.dependency import DEPENDENCIES except ImportError: pass
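
A note on the closing `setup.py` hunk: when `setup.py` runs as a top-level script it has no parent package, so the old relative form `from .manual.dependency import DEPENDENCIES` always raised `ImportError` and the `except` branch silently discarded any manual dependency override. The absolute form lets the override actually load whenever `azext_datafactory` is importable. Below is a minimal sketch of that optional-override pattern as the hunk leaves it; the contents of `azext_datafactory/manual/dependency.py` shown in the trailing comment are purely illustrative, not taken from this change.

```python
# Optional dependency override, mirroring the pattern in the setup.py hunk.
DEPENDENCIES = []
try:
    # Absolute import: resolvable even when this file is executed as a
    # plain script. A relative import here ("from .manual.dependency ...")
    # would raise ImportError immediately, since a script has no parent
    # package, and the override below would never take effect.
    from azext_datafactory.manual.dependency import DEPENDENCIES
except ImportError:
    # No manual override shipped (or the package is not importable yet):
    # keep the default empty list.
    pass

# Hypothetical azext_datafactory/manual/dependency.py picked up above:
# DEPENDENCIES = ['some-extra-requirement>=1.0']  # illustrative only
```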