diff --git a/sdk/synapse/azure-mgmt-synapse/azure/mgmt/synapse/_synapse_management_client.py b/sdk/synapse/azure-mgmt-synapse/azure/mgmt/synapse/_synapse_management_client.py index 56a31debe3c6..b35e01bd685c 100644 --- a/sdk/synapse/azure-mgmt-synapse/azure/mgmt/synapse/_synapse_management_client.py +++ b/sdk/synapse/azure-mgmt-synapse/azure/mgmt/synapse/_synapse_management_client.py @@ -36,6 +36,10 @@ from .operations import SqlPoolVulnerabilityAssessmentScansOperations from .operations import SqlPoolSecurityAlertPoliciesOperations from .operations import SqlPoolVulnerabilityAssessmentRuleBaselinesOperations +from .operations import ExtendedSqlPoolBlobAuditingPoliciesOperations +from .operations import DataMaskingPoliciesOperations +from .operations import DataMaskingRulesOperations +from .operations import SqlPoolColumnsOperations from .operations import WorkspacesOperations from .operations import WorkspaceAadAdminsOperations from .operations import WorkspaceManagedIdentitySqlControlSettingsOperations @@ -106,6 +110,14 @@ class SynapseManagementClient(SDKClient): :vartype sql_pool_security_alert_policies: azure.mgmt.synapse.operations.SqlPoolSecurityAlertPoliciesOperations :ivar sql_pool_vulnerability_assessment_rule_baselines: SqlPoolVulnerabilityAssessmentRuleBaselines operations :vartype sql_pool_vulnerability_assessment_rule_baselines: azure.mgmt.synapse.operations.SqlPoolVulnerabilityAssessmentRuleBaselinesOperations + :ivar extended_sql_pool_blob_auditing_policies: ExtendedSqlPoolBlobAuditingPolicies operations + :vartype extended_sql_pool_blob_auditing_policies: azure.mgmt.synapse.operations.ExtendedSqlPoolBlobAuditingPoliciesOperations + :ivar data_masking_policies: DataMaskingPolicies operations + :vartype data_masking_policies: azure.mgmt.synapse.operations.DataMaskingPoliciesOperations + :ivar data_masking_rules: DataMaskingRules operations + :vartype data_masking_rules: azure.mgmt.synapse.operations.DataMaskingRulesOperations + :ivar 
sql_pool_columns: SqlPoolColumns operations + :vartype sql_pool_columns: azure.mgmt.synapse.operations.SqlPoolColumnsOperations :ivar workspaces: Workspaces operations :vartype workspaces: azure.mgmt.synapse.operations.WorkspacesOperations :ivar workspace_aad_admins: WorkspaceAadAdmins operations @@ -202,6 +214,14 @@ def __init__( self._client, self.config, self._serialize, self._deserialize) self.sql_pool_vulnerability_assessment_rule_baselines = SqlPoolVulnerabilityAssessmentRuleBaselinesOperations( self._client, self.config, self._serialize, self._deserialize) + self.extended_sql_pool_blob_auditing_policies = ExtendedSqlPoolBlobAuditingPoliciesOperations( + self._client, self.config, self._serialize, self._deserialize) + self.data_masking_policies = DataMaskingPoliciesOperations( + self._client, self.config, self._serialize, self._deserialize) + self.data_masking_rules = DataMaskingRulesOperations( + self._client, self.config, self._serialize, self._deserialize) + self.sql_pool_columns = SqlPoolColumnsOperations( + self._client, self.config, self._serialize, self._deserialize) self.workspaces = WorkspacesOperations( self._client, self.config, self._serialize, self._deserialize) self.workspace_aad_admins = WorkspaceAadAdminsOperations( diff --git a/sdk/synapse/azure-mgmt-synapse/azure/mgmt/synapse/models/__init__.py b/sdk/synapse/azure-mgmt-synapse/azure/mgmt/synapse/models/__init__.py index ac297f863a74..cd576c09ee78 100644 --- a/sdk/synapse/azure-mgmt-synapse/azure/mgmt/synapse/models/__init__.py +++ b/sdk/synapse/azure-mgmt-synapse/azure/mgmt/synapse/models/__init__.py @@ -24,6 +24,8 @@ from ._models_py3 import CreateSqlPoolRestorePointDefinition from ._models_py3 import CustomSetupBase from ._models_py3 import DataLakeStorageAccountDetails + from ._models_py3 import DataMaskingPolicy + from ._models_py3 import DataMaskingRule from ._models_py3 import DataWarehouseUserActivities from ._models_py3 import EntityReference from ._models_py3 import 
EnvironmentVariableSetup @@ -31,6 +33,7 @@ from ._models_py3 import ErrorContract, ErrorContractException from ._models_py3 import ErrorDetail from ._models_py3 import ErrorResponse + from ._models_py3 import ExtendedSqlPoolBlobAuditingPolicy from ._models_py3 import GeoBackupPolicy from ._models_py3 import GetSsisObjectMetadataRequest from ._models_py3 import IntegrationRuntime @@ -52,6 +55,7 @@ from ._models_py3 import IntegrationRuntimeVNetProperties from ._models_py3 import IpFirewallRuleInfo from ._models_py3 import IpFirewallRuleProperties + from ._models_py3 import LibraryInfo from ._models_py3 import LibraryRequirements from ._models_py3 import LinkedIntegrationRuntime from ._models_py3 import LinkedIntegrationRuntimeKeyAuthorization @@ -65,6 +69,7 @@ from ._models_py3 import ManagedIntegrationRuntimeNode from ._models_py3 import ManagedIntegrationRuntimeOperationResult from ._models_py3 import ManagedIntegrationRuntimeStatus + from ._models_py3 import ManagedVirtualNetworkSettings from ._models_py3 import MetadataSyncConfig from ._models_py3 import OperationMetaLogSpecification from ._models_py3 import OperationMetaMetricDimensionSpecification @@ -133,6 +138,7 @@ from ._models_py3 import Workspace from ._models_py3 import WorkspaceAadAdminInfo from ._models_py3 import WorkspacePatchInfo + from ._models_py3 import WorkspacePatchInfoManagedVirtualNetworkSettings except (SyntaxError, ImportError): from ._models import AutoPauseProperties from ._models import AutoScaleProperties @@ -148,6 +154,8 @@ from ._models import CreateSqlPoolRestorePointDefinition from ._models import CustomSetupBase from ._models import DataLakeStorageAccountDetails + from ._models import DataMaskingPolicy + from ._models import DataMaskingRule from ._models import DataWarehouseUserActivities from ._models import EntityReference from ._models import EnvironmentVariableSetup @@ -155,6 +163,7 @@ from ._models import ErrorContract, ErrorContractException from ._models import ErrorDetail 
from ._models import ErrorResponse + from ._models import ExtendedSqlPoolBlobAuditingPolicy from ._models import GeoBackupPolicy from ._models import GetSsisObjectMetadataRequest from ._models import IntegrationRuntime @@ -176,6 +185,7 @@ from ._models import IntegrationRuntimeVNetProperties from ._models import IpFirewallRuleInfo from ._models import IpFirewallRuleProperties + from ._models import LibraryInfo from ._models import LibraryRequirements from ._models import LinkedIntegrationRuntime from ._models import LinkedIntegrationRuntimeKeyAuthorization @@ -189,6 +199,7 @@ from ._models import ManagedIntegrationRuntimeNode from ._models import ManagedIntegrationRuntimeOperationResult from ._models import ManagedIntegrationRuntimeStatus + from ._models import ManagedVirtualNetworkSettings from ._models import MetadataSyncConfig from ._models import OperationMetaLogSpecification from ._models import OperationMetaMetricDimensionSpecification @@ -257,7 +268,10 @@ from ._models import Workspace from ._models import WorkspaceAadAdminInfo from ._models import WorkspacePatchInfo + from ._models import WorkspacePatchInfoManagedVirtualNetworkSettings from ._paged_models import BigDataPoolResourceInfoPaged +from ._paged_models import DataMaskingRulePaged +from ._paged_models import ExtendedSqlPoolBlobAuditingPolicyPaged from ._paged_models import IntegrationRuntimeResourcePaged from ._paged_models import IpFirewallRuleInfoPaged from ._paged_models import PrivateEndpointConnectionPaged @@ -266,6 +280,7 @@ from ._paged_models import ReplicationLinkPaged from ._paged_models import RestorePointPaged from ._paged_models import SensitivityLabelPaged +from ._paged_models import SqlPoolBlobAuditingPolicyPaged from ._paged_models import SqlPoolColumnPaged from ._paged_models import SqlPoolOperationPaged from ._paged_models import SqlPoolPaged @@ -295,6 +310,9 @@ VulnerabilityAssessmentScanTriggerType, VulnerabilityAssessmentScanState, SecurityAlertPolicyState, + DataMaskingState, + 
DataMaskingRuleState, + DataMaskingFunction, ResourceIdentityType, IntegrationRuntimeType, IntegrationRuntimeState, @@ -310,6 +328,7 @@ IntegrationRuntimeAutoUpdate, IntegrationRuntimeAuthKeyName, SsisObjectMetadataType, + SensitivityLabelSource, VulnerabilityAssessmentPolicyBaselineName, ) @@ -328,6 +347,8 @@ 'CreateSqlPoolRestorePointDefinition', 'CustomSetupBase', 'DataLakeStorageAccountDetails', + 'DataMaskingPolicy', + 'DataMaskingRule', 'DataWarehouseUserActivities', 'EntityReference', 'EnvironmentVariableSetup', @@ -335,6 +356,7 @@ 'ErrorContract', 'ErrorContractException', 'ErrorDetail', 'ErrorResponse', + 'ExtendedSqlPoolBlobAuditingPolicy', 'GeoBackupPolicy', 'GetSsisObjectMetadataRequest', 'IntegrationRuntime', @@ -356,6 +378,7 @@ 'IntegrationRuntimeVNetProperties', 'IpFirewallRuleInfo', 'IpFirewallRuleProperties', + 'LibraryInfo', 'LibraryRequirements', 'LinkedIntegrationRuntime', 'LinkedIntegrationRuntimeKeyAuthorization', @@ -369,6 +392,7 @@ 'ManagedIntegrationRuntimeNode', 'ManagedIntegrationRuntimeOperationResult', 'ManagedIntegrationRuntimeStatus', + 'ManagedVirtualNetworkSettings', 'MetadataSyncConfig', 'OperationMetaLogSpecification', 'OperationMetaMetricDimensionSpecification', @@ -437,11 +461,13 @@ 'Workspace', 'WorkspaceAadAdminInfo', 'WorkspacePatchInfo', + 'WorkspacePatchInfoManagedVirtualNetworkSettings', 'BigDataPoolResourceInfoPaged', 'IpFirewallRuleInfoPaged', 'SqlPoolPaged', 'RestorePointPaged', 'ReplicationLinkPaged', + 'SqlPoolBlobAuditingPolicyPaged', 'SqlPoolOperationPaged', 'SqlPoolUsagePaged', 'SensitivityLabelPaged', @@ -450,6 +476,8 @@ 'SqlPoolColumnPaged', 'SqlPoolVulnerabilityAssessmentPaged', 'VulnerabilityAssessmentScanRecordPaged', + 'ExtendedSqlPoolBlobAuditingPolicyPaged', + 'DataMaskingRulePaged', 'WorkspacePaged', 'IntegrationRuntimeResourcePaged', 'PrivateLinkResourcePaged', @@ -474,6 +502,9 @@ 'VulnerabilityAssessmentScanTriggerType', 'VulnerabilityAssessmentScanState', 'SecurityAlertPolicyState', + 
'DataMaskingState', + 'DataMaskingRuleState', + 'DataMaskingFunction', 'ResourceIdentityType', 'IntegrationRuntimeType', 'IntegrationRuntimeState', @@ -489,5 +520,6 @@ 'IntegrationRuntimeAutoUpdate', 'IntegrationRuntimeAuthKeyName', 'SsisObjectMetadataType', + 'SensitivityLabelSource', 'VulnerabilityAssessmentPolicyBaselineName', ] diff --git a/sdk/synapse/azure-mgmt-synapse/azure/mgmt/synapse/models/_models.py b/sdk/synapse/azure-mgmt-synapse/azure/mgmt/synapse/models/_models.py index e82a36f58589..183012900d52 100644 --- a/sdk/synapse/azure-mgmt-synapse/azure/mgmt/synapse/models/_models.py +++ b/sdk/synapse/azure-mgmt-synapse/azure/mgmt/synapse/models/_models.py @@ -293,12 +293,22 @@ class BigDataPoolResourceInfo(TrackedResource): :param is_compute_isolation_enabled: Whether compute isolation is required or not. :type is_compute_isolation_enabled: bool + :param session_level_packages_enabled: Whether session level + library/package management is enabled or not. + :type session_level_packages_enabled: bool :param spark_events_folder: The Spark events folder :type spark_events_folder: str :param node_count: The number of nodes in the Big Data pool. :type node_count: int :param library_requirements: Library version requirements :type library_requirements: ~azure.mgmt.synapse.models.LibraryRequirements + :param custom_libraries: List of custom libraries/packages associated with + the spark pool. + :type custom_libraries: list[~azure.mgmt.synapse.models.LibraryInfo] + :param spark_config_properties: Spark configuration file to specify + additional properties + :type spark_config_properties: + ~azure.mgmt.synapse.models.LibraryRequirements :param spark_version: The Apache Spark version. 
:type spark_version: str :param default_spark_log_folder: The default folder where Spark logs will @@ -306,7 +316,7 @@ class BigDataPoolResourceInfo(TrackedResource): :type default_spark_log_folder: str :param node_size: The level of compute power that each node in the Big Data pool has. Possible values include: 'None', 'Small', 'Medium', - 'Large', 'XLarge', 'XXLarge' + 'Large', 'XLarge', 'XXLarge', 'XXXLarge' :type node_size: str or ~azure.mgmt.synapse.models.NodeSize :param node_size_family: The kind of nodes that the Big Data pool provides. Possible values include: 'None', 'MemoryOptimized' @@ -331,9 +341,12 @@ class BigDataPoolResourceInfo(TrackedResource): 'creation_date': {'key': 'properties.creationDate', 'type': 'iso-8601'}, 'auto_pause': {'key': 'properties.autoPause', 'type': 'AutoPauseProperties'}, 'is_compute_isolation_enabled': {'key': 'properties.isComputeIsolationEnabled', 'type': 'bool'}, + 'session_level_packages_enabled': {'key': 'properties.sessionLevelPackagesEnabled', 'type': 'bool'}, 'spark_events_folder': {'key': 'properties.sparkEventsFolder', 'type': 'str'}, 'node_count': {'key': 'properties.nodeCount', 'type': 'int'}, 'library_requirements': {'key': 'properties.libraryRequirements', 'type': 'LibraryRequirements'}, + 'custom_libraries': {'key': 'properties.customLibraries', 'type': '[LibraryInfo]'}, + 'spark_config_properties': {'key': 'properties.sparkConfigProperties', 'type': 'LibraryRequirements'}, 'spark_version': {'key': 'properties.sparkVersion', 'type': 'str'}, 'default_spark_log_folder': {'key': 'properties.defaultSparkLogFolder', 'type': 'str'}, 'node_size': {'key': 'properties.nodeSize', 'type': 'str'}, @@ -347,9 +360,12 @@ def __init__(self, **kwargs): self.creation_date = kwargs.get('creation_date', None) self.auto_pause = kwargs.get('auto_pause', None) self.is_compute_isolation_enabled = kwargs.get('is_compute_isolation_enabled', None) + self.session_level_packages_enabled = kwargs.get('session_level_packages_enabled', None) 
self.spark_events_folder = kwargs.get('spark_events_folder', None) self.node_count = kwargs.get('node_count', None) self.library_requirements = kwargs.get('library_requirements', None) + self.custom_libraries = kwargs.get('custom_libraries', None) + self.spark_config_properties = kwargs.get('spark_config_properties', None) self.spark_version = kwargs.get('spark_version', None) self.default_spark_log_folder = kwargs.get('default_spark_log_folder', None) self.node_size = kwargs.get('node_size', None) @@ -628,6 +644,197 @@ def __init__(self, **kwargs): super(ProxyResource, self).__init__(**kwargs) +class DataMaskingPolicy(ProxyResource): + """Represents a database data masking policy. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName} + :vartype id: str + :ivar name: The name of the resource + :vartype name: str + :ivar type: The type of the resource. Ex- + Microsoft.Compute/virtualMachines or Microsoft.Storage/storageAccounts. + :vartype type: str + :param data_masking_state: Required. The state of the data masking policy. + Possible values include: 'Disabled', 'Enabled' + :type data_masking_state: str or + ~azure.mgmt.synapse.models.DataMaskingState + :param exempt_principals: The list of the exempt principals. Specifies the + semicolon-separated list of database users for which the data masking + policy does not apply. The specified users receive data results without + masking for all of the database queries. + :type exempt_principals: str + :ivar application_principals: The list of the application principals. This + is a legacy parameter and is no longer used. + :vartype application_principals: str + :ivar masking_level: The masking level. 
This is a legacy parameter and is + no longer used. + :vartype masking_level: str + :ivar location: The location of the data masking policy. + :vartype location: str + :ivar kind: The kind of data masking policy. Metadata, used for Azure + portal. + :vartype kind: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'data_masking_state': {'required': True}, + 'application_principals': {'readonly': True}, + 'masking_level': {'readonly': True}, + 'location': {'readonly': True}, + 'kind': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'data_masking_state': {'key': 'properties.dataMaskingState', 'type': 'DataMaskingState'}, + 'exempt_principals': {'key': 'properties.exemptPrincipals', 'type': 'str'}, + 'application_principals': {'key': 'properties.applicationPrincipals', 'type': 'str'}, + 'masking_level': {'key': 'properties.maskingLevel', 'type': 'str'}, + 'location': {'key': 'location', 'type': 'str'}, + 'kind': {'key': 'kind', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(DataMaskingPolicy, self).__init__(**kwargs) + self.data_masking_state = kwargs.get('data_masking_state', None) + self.exempt_principals = kwargs.get('exempt_principals', None) + self.application_principals = None + self.masking_level = None + self.location = None + self.kind = None + + +class DataMaskingRule(ProxyResource): + """Represents a Sql pool data masking rule. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource Id for the resource. 
Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName} + :vartype id: str + :ivar name: The name of the resource + :vartype name: str + :ivar type: The type of the resource. Ex- + Microsoft.Compute/virtualMachines or Microsoft.Storage/storageAccounts. + :vartype type: str + :ivar data_masking_rule_id: The rule Id. + :vartype data_masking_rule_id: str + :param alias_name: The alias name. This is a legacy parameter and is no + longer used. + :type alias_name: str + :param rule_state: The rule state. Used to delete a rule. To delete an + existing rule, specify the schemaName, tableName, columnName, + maskingFunction, and specify ruleState as disabled. However, if the rule + doesn't already exist, the rule will be created with ruleState set to + enabled, regardless of the provided value of ruleState. Possible values + include: 'Disabled', 'Enabled' + :type rule_state: str or ~azure.mgmt.synapse.models.DataMaskingRuleState + :param schema_name: Required. The schema name on which the data masking + rule is applied. + :type schema_name: str + :param table_name: Required. The table name on which the data masking rule + is applied. + :type table_name: str + :param column_name: Required. The column name on which the data masking + rule is applied. + :type column_name: str + :param masking_function: Required. The masking function that is used for + the data masking rule. Possible values include: 'Default', 'CCN', 'Email', + 'Number', 'SSN', 'Text' + :type masking_function: str or + ~azure.mgmt.synapse.models.DataMaskingFunction + :param number_from: The numberFrom property of the masking rule. Required + if maskingFunction is set to Number, otherwise this parameter will be + ignored. + :type number_from: str + :param number_to: The numberTo property of the data masking rule. Required + if maskingFunction is set to Number, otherwise this parameter will be + ignored. 
+ :type number_to: str + :param prefix_size: If maskingFunction is set to Text, the number of + characters to show unmasked in the beginning of the string. Otherwise, + this parameter will be ignored. + :type prefix_size: str + :param suffix_size: If maskingFunction is set to Text, the number of + characters to show unmasked at the end of the string. Otherwise, this + parameter will be ignored. + :type suffix_size: str + :param replacement_string: If maskingFunction is set to Text, the + character to use for masking the unexposed part of the string. Otherwise, + this parameter will be ignored. + :type replacement_string: str + :ivar location: The location of the data masking rule. + :vartype location: str + :ivar kind: The kind of Data Masking Rule. Metadata, used for Azure + portal. + :vartype kind: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'data_masking_rule_id': {'readonly': True}, + 'schema_name': {'required': True}, + 'table_name': {'required': True}, + 'column_name': {'required': True}, + 'masking_function': {'required': True}, + 'location': {'readonly': True}, + 'kind': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'data_masking_rule_id': {'key': 'properties.id', 'type': 'str'}, + 'alias_name': {'key': 'properties.aliasName', 'type': 'str'}, + 'rule_state': {'key': 'properties.ruleState', 'type': 'DataMaskingRuleState'}, + 'schema_name': {'key': 'properties.schemaName', 'type': 'str'}, + 'table_name': {'key': 'properties.tableName', 'type': 'str'}, + 'column_name': {'key': 'properties.columnName', 'type': 'str'}, + 'masking_function': {'key': 'properties.maskingFunction', 'type': 'DataMaskingFunction'}, + 'number_from': {'key': 'properties.numberFrom', 'type': 'str'}, + 'number_to': {'key': 'properties.numberTo', 'type': 'str'}, + 'prefix_size': {'key': 
'properties.prefixSize', 'type': 'str'}, + 'suffix_size': {'key': 'properties.suffixSize', 'type': 'str'}, + 'replacement_string': {'key': 'properties.replacementString', 'type': 'str'}, + 'location': {'key': 'location', 'type': 'str'}, + 'kind': {'key': 'kind', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(DataMaskingRule, self).__init__(**kwargs) + self.data_masking_rule_id = None + self.alias_name = kwargs.get('alias_name', None) + self.rule_state = kwargs.get('rule_state', None) + self.schema_name = kwargs.get('schema_name', None) + self.table_name = kwargs.get('table_name', None) + self.column_name = kwargs.get('column_name', None) + self.masking_function = kwargs.get('masking_function', None) + self.number_from = kwargs.get('number_from', None) + self.number_to = kwargs.get('number_to', None) + self.prefix_size = kwargs.get('prefix_size', None) + self.suffix_size = kwargs.get('suffix_size', None) + self.replacement_string = kwargs.get('replacement_string', None) + self.location = None + self.kind = None + + class DataWarehouseUserActivities(ProxyResource): """User activities of a data warehouse. @@ -845,6 +1052,177 @@ def __init__(self, **kwargs): self.additional_info = None +class ExtendedSqlPoolBlobAuditingPolicy(ProxyResource): + """An extended Sql pool blob auditing policy. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName} + :vartype id: str + :ivar name: The name of the resource + :vartype name: str + :ivar type: The type of the resource. Ex- + Microsoft.Compute/virtualMachines or Microsoft.Storage/storageAccounts. 
+ :vartype type: str + :param predicate_expression: Specifies condition of where clause when + creating an audit. + :type predicate_expression: str + :param state: Required. Specifies the state of the policy. If state is + Enabled, storageEndpoint or isAzureMonitorTargetEnabled are required. + Possible values include: 'Enabled', 'Disabled' + :type state: str or ~azure.mgmt.synapse.models.BlobAuditingPolicyState + :param storage_endpoint: Specifies the blob storage endpoint (e.g. + https://MyAccount.blob.core.windows.net). If state is Enabled, + storageEndpoint or isAzureMonitorTargetEnabled is required. + :type storage_endpoint: str + :param storage_account_access_key: Specifies the identifier key of the + auditing storage account. + If state is Enabled and storageEndpoint is specified, not specifying the + storageAccountAccessKey will use SQL server system-assigned managed + identity to access the storage. + Prerequisites for using managed identity authentication: + 1. Assign SQL Server a system-assigned managed identity in Azure Active + Directory (AAD). + 2. Grant SQL Server identity access to the storage account by adding + 'Storage Blob Data Contributor' RBAC role to the server identity. + For more information, see [Auditing to storage using Managed Identity + authentication](https://go.microsoft.com/fwlink/?linkid=2114355) + :type storage_account_access_key: str + :param retention_days: Specifies the number of days to keep in the audit + logs in the storage account. + :type retention_days: int + :param audit_actions_and_groups: Specifies the Actions-Groups and Actions + to audit. + The recommended set of action groups to use is the following combination - + this will audit all the queries and stored procedures executed against the + database, as well as successful and failed logins: + BATCH_COMPLETED_GROUP, + SUCCESSFUL_DATABASE_AUTHENTICATION_GROUP, + FAILED_DATABASE_AUTHENTICATION_GROUP. 
+ This above combination is also the set that is configured by default when + enabling auditing from the Azure portal. + The supported action groups to audit are (note: choose only specific + groups that cover your auditing needs. Using unnecessary groups could lead + to very large quantities of audit records): + APPLICATION_ROLE_CHANGE_PASSWORD_GROUP + BACKUP_RESTORE_GROUP + DATABASE_LOGOUT_GROUP + DATABASE_OBJECT_CHANGE_GROUP + DATABASE_OBJECT_OWNERSHIP_CHANGE_GROUP + DATABASE_OBJECT_PERMISSION_CHANGE_GROUP + DATABASE_OPERATION_GROUP + DATABASE_PERMISSION_CHANGE_GROUP + DATABASE_PRINCIPAL_CHANGE_GROUP + DATABASE_PRINCIPAL_IMPERSONATION_GROUP + DATABASE_ROLE_MEMBER_CHANGE_GROUP + FAILED_DATABASE_AUTHENTICATION_GROUP + SCHEMA_OBJECT_ACCESS_GROUP + SCHEMA_OBJECT_CHANGE_GROUP + SCHEMA_OBJECT_OWNERSHIP_CHANGE_GROUP + SCHEMA_OBJECT_PERMISSION_CHANGE_GROUP + SUCCESSFUL_DATABASE_AUTHENTICATION_GROUP + USER_CHANGE_PASSWORD_GROUP + BATCH_STARTED_GROUP + BATCH_COMPLETED_GROUP + These are groups that cover all sql statements and stored procedures + executed against the database, and should not be used in combination with + other groups as this will result in duplicate audit logs. + For more information, see [Database-Level Audit Action + Groups](https://docs.microsoft.com/en-us/sql/relational-databases/security/auditing/sql-server-audit-action-groups-and-actions#database-level-audit-action-groups). + For Database auditing policy, specific Actions can also be specified (note + that Actions cannot be specified for Server auditing policy). The + supported actions to audit are: + SELECT + UPDATE + INSERT + DELETE + EXECUTE + RECEIVE + REFERENCES + The general form for defining an action to be audited is: + {action} ON {object} BY {principal} + Note that in the above format can refer to an object like a + table, view, or stored procedure, or an entire database or schema. For the + latter cases, the forms DATABASE::{db_name} and SCHEMA::{schema_name} are + used, respectively. 
+ For example: + SELECT on dbo.myTable by public + SELECT on DATABASE::myDatabase by public + SELECT on SCHEMA::mySchema by public + For more information, see [Database-Level Audit + Actions](https://docs.microsoft.com/en-us/sql/relational-databases/security/auditing/sql-server-audit-action-groups-and-actions#database-level-audit-actions) + :type audit_actions_and_groups: list[str] + :param storage_account_subscription_id: Specifies the blob storage + subscription Id. + :type storage_account_subscription_id: str + :param is_storage_secondary_key_in_use: Specifies whether + storageAccountAccessKey value is the storage's secondary key. + :type is_storage_secondary_key_in_use: bool + :param is_azure_monitor_target_enabled: Specifies whether audit events are + sent to Azure Monitor. + In order to send the events to Azure Monitor, specify 'state' as 'Enabled' + and 'isAzureMonitorTargetEnabled' as true. + When using REST API to configure auditing, Diagnostic Settings with + 'SQLSecurityAuditEvents' diagnostic logs category on the database should + be also created. + Note that for server level audit you should use the 'master' database as + {databaseName}. + Diagnostic Settings URI format: + PUT + https://management.azure.com/subscriptions/{subscriptionId}/resourceGroups/{resourceGroup}/providers/Microsoft.Sql/servers/{serverName}/databases/{databaseName}/providers/microsoft.insights/diagnosticSettings/{settingsName}?api-version=2017-05-01-preview + For more information, see [Diagnostic Settings REST + API](https://go.microsoft.com/fwlink/?linkid=2033207) + or [Diagnostic Settings + PowerShell](https://go.microsoft.com/fwlink/?linkid=2033043) + :type is_azure_monitor_target_enabled: bool + :param queue_delay_ms: Specifies the amount of time in milliseconds that + can elapse before audit actions are forced to be processed. + The default minimum value is 1000 (1 second). The maximum is + 2,147,483,647. 
+ :type queue_delay_ms: int + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'state': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'predicate_expression': {'key': 'properties.predicateExpression', 'type': 'str'}, + 'state': {'key': 'properties.state', 'type': 'BlobAuditingPolicyState'}, + 'storage_endpoint': {'key': 'properties.storageEndpoint', 'type': 'str'}, + 'storage_account_access_key': {'key': 'properties.storageAccountAccessKey', 'type': 'str'}, + 'retention_days': {'key': 'properties.retentionDays', 'type': 'int'}, + 'audit_actions_and_groups': {'key': 'properties.auditActionsAndGroups', 'type': '[str]'}, + 'storage_account_subscription_id': {'key': 'properties.storageAccountSubscriptionId', 'type': 'str'}, + 'is_storage_secondary_key_in_use': {'key': 'properties.isStorageSecondaryKeyInUse', 'type': 'bool'}, + 'is_azure_monitor_target_enabled': {'key': 'properties.isAzureMonitorTargetEnabled', 'type': 'bool'}, + 'queue_delay_ms': {'key': 'properties.queueDelayMs', 'type': 'int'}, + } + + def __init__(self, **kwargs): + super(ExtendedSqlPoolBlobAuditingPolicy, self).__init__(**kwargs) + self.predicate_expression = kwargs.get('predicate_expression', None) + self.state = kwargs.get('state', None) + self.storage_endpoint = kwargs.get('storage_endpoint', None) + self.storage_account_access_key = kwargs.get('storage_account_access_key', None) + self.retention_days = kwargs.get('retention_days', None) + self.audit_actions_and_groups = kwargs.get('audit_actions_and_groups', None) + self.storage_account_subscription_id = kwargs.get('storage_account_subscription_id', None) + self.is_storage_secondary_key_in_use = kwargs.get('is_storage_secondary_key_in_use', None) + self.is_azure_monitor_target_enabled = kwargs.get('is_azure_monitor_target_enabled', None) + self.queue_delay_ms 
= kwargs.get('queue_delay_ms', None) + + class GeoBackupPolicy(ProxyResource): """A database geo backup policy. @@ -1669,6 +2047,40 @@ def __init__(self, **kwargs): self.start_ip_address = kwargs.get('start_ip_address', None) +class LibraryInfo(Model): + """Information about a library/package created at the workspace level. + + Library/package information of a Big Data pool powered by Apache Spark. + + :param name: Name of the library. + :type name: str + :param path: Storage blob path of library. + :type path: str + :param container_name: Storage blob container name. + :type container_name: str + :param uploaded_timestamp: The last update time of the library. + :type uploaded_timestamp: datetime + :param type: Type of the library. + :type type: str + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'path': {'key': 'path', 'type': 'str'}, + 'container_name': {'key': 'containerName', 'type': 'str'}, + 'uploaded_timestamp': {'key': 'uploadedTimestamp', 'type': 'iso-8601'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(LibraryInfo, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + self.path = kwargs.get('path', None) + self.container_name = kwargs.get('container_name', None) + self.uploaded_timestamp = kwargs.get('uploaded_timestamp', None) + self.type = kwargs.get('type', None) + + class LibraryRequirements(Model): """Spark pool library version requirements. @@ -2189,6 +2601,32 @@ def __init__(self, **kwargs): self.type = 'Managed' +class ManagedVirtualNetworkSettings(Model): + """Managed Virtual Network Settings. 
+ + :param prevent_data_exfiltration: Prevent Data Exfiltration + :type prevent_data_exfiltration: bool + :param linked_access_check_on_target_resource: Linked Access Check On + Target Resource + :type linked_access_check_on_target_resource: bool + :param allowed_aad_tenant_ids_for_linking: Allowed Aad Tenant Ids For + Linking + :type allowed_aad_tenant_ids_for_linking: list[str] + """ + + _attribute_map = { + 'prevent_data_exfiltration': {'key': 'preventDataExfiltration', 'type': 'bool'}, + 'linked_access_check_on_target_resource': {'key': 'linkedAccessCheckOnTargetResource', 'type': 'bool'}, + 'allowed_aad_tenant_ids_for_linking': {'key': 'allowedAadTenantIdsForLinking', 'type': '[str]'}, + } + + def __init__(self, **kwargs): + super(ManagedVirtualNetworkSettings, self).__init__(**kwargs) + self.prevent_data_exfiltration = kwargs.get('prevent_data_exfiltration', None) + self.linked_access_check_on_target_resource = kwargs.get('linked_access_check_on_target_resource', None) + self.allowed_aad_tenant_ids_for_linking = kwargs.get('allowed_aad_tenant_ids_for_linking', None) + + class MetadataSyncConfig(ProxyResource): """Metadata sync configuration. 
@@ -5028,6 +5466,9 @@ class Workspace(TrackedResource): list[~azure.mgmt.synapse.models.PrivateEndpointConnection] :ivar extra_properties: Workspace level configs and feature flags :vartype extra_properties: dict[str, object] + :param managed_virtual_network_settings: Managed Virtual Network Settings + :type managed_virtual_network_settings: + ~azure.mgmt.synapse.models.ManagedVirtualNetworkSettings :param identity: Identity of the workspace :type identity: ~azure.mgmt.synapse.models.ManagedIdentity """ @@ -5057,6 +5498,7 @@ class Workspace(TrackedResource): 'managed_virtual_network': {'key': 'properties.managedVirtualNetwork', 'type': 'str'}, 'private_endpoint_connections': {'key': 'properties.privateEndpointConnections', 'type': '[PrivateEndpointConnection]'}, 'extra_properties': {'key': 'properties.extraProperties', 'type': '{object}'}, + 'managed_virtual_network_settings': {'key': 'properties.managedVirtualNetworkSettings', 'type': 'ManagedVirtualNetworkSettings'}, 'identity': {'key': 'identity', 'type': 'ManagedIdentity'}, } @@ -5072,6 +5514,7 @@ def __init__(self, **kwargs): self.managed_virtual_network = kwargs.get('managed_virtual_network', None) self.private_endpoint_connections = kwargs.get('private_endpoint_connections', None) self.extra_properties = None + self.managed_virtual_network_settings = kwargs.get('managed_virtual_network_settings', None) self.identity = kwargs.get('identity', None) @@ -5134,6 +5577,9 @@ class WorkspacePatchInfo(Model): :type tags: dict[str, str] :param identity: The identity of the workspace :type identity: ~azure.mgmt.synapse.models.ManagedIdentity + :param managed_virtual_network_settings: Managed Virtual Network Settings + :type managed_virtual_network_settings: + ~azure.mgmt.synapse.models.WorkspacePatchInfoManagedVirtualNetworkSettings :param sql_administrator_login_password: SQL administrator login password :type sql_administrator_login_password: str :ivar provisioning_state: Resource provisioning state @@ -5147,6 
+5593,7 @@ class WorkspacePatchInfo(Model): _attribute_map = { 'tags': {'key': 'tags', 'type': '{str}'}, 'identity': {'key': 'identity', 'type': 'ManagedIdentity'}, + 'managed_virtual_network_settings': {'key': 'managedVirtualNetworkSettings', 'type': 'WorkspacePatchInfoManagedVirtualNetworkSettings'}, 'sql_administrator_login_password': {'key': 'properties.sqlAdministratorLoginPassword', 'type': 'str'}, 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, } @@ -5155,5 +5602,32 @@ def __init__(self, **kwargs): super(WorkspacePatchInfo, self).__init__(**kwargs) self.tags = kwargs.get('tags', None) self.identity = kwargs.get('identity', None) + self.managed_virtual_network_settings = kwargs.get('managed_virtual_network_settings', None) self.sql_administrator_login_password = kwargs.get('sql_administrator_login_password', None) self.provisioning_state = None + + +class WorkspacePatchInfoManagedVirtualNetworkSettings(Model): + """Managed Virtual Network Settings. + + :param prevent_data_exfiltration: Prevent Data Exfiltration + :type prevent_data_exfiltration: bool + :param linked_access_check_on_target_resource: Linked Access Check On + Target Resource + :type linked_access_check_on_target_resource: bool + :param allowed_aad_tenant_ids_for_linking: Allowed Aad Tenant Ids For + Linking + :type allowed_aad_tenant_ids_for_linking: list[str] + """ + + _attribute_map = { + 'prevent_data_exfiltration': {'key': 'preventDataExfiltration', 'type': 'bool'}, + 'linked_access_check_on_target_resource': {'key': 'linkedAccessCheckOnTargetResource', 'type': 'bool'}, + 'allowed_aad_tenant_ids_for_linking': {'key': 'allowedAadTenantIdsForLinking', 'type': '[str]'}, + } + + def __init__(self, **kwargs): + super(WorkspacePatchInfoManagedVirtualNetworkSettings, self).__init__(**kwargs) + self.prevent_data_exfiltration = kwargs.get('prevent_data_exfiltration', None) + self.linked_access_check_on_target_resource = 
kwargs.get('linked_access_check_on_target_resource', None) + self.allowed_aad_tenant_ids_for_linking = kwargs.get('allowed_aad_tenant_ids_for_linking', None) diff --git a/sdk/synapse/azure-mgmt-synapse/azure/mgmt/synapse/models/_models_py3.py b/sdk/synapse/azure-mgmt-synapse/azure/mgmt/synapse/models/_models_py3.py index c4f72918ef88..450915909dbb 100644 --- a/sdk/synapse/azure-mgmt-synapse/azure/mgmt/synapse/models/_models_py3.py +++ b/sdk/synapse/azure-mgmt-synapse/azure/mgmt/synapse/models/_models_py3.py @@ -293,12 +293,22 @@ class BigDataPoolResourceInfo(TrackedResource): :param is_compute_isolation_enabled: Whether compute isolation is required or not. :type is_compute_isolation_enabled: bool + :param session_level_packages_enabled: Whether session level + library/package management is enabled or not. + :type session_level_packages_enabled: bool :param spark_events_folder: The Spark events folder :type spark_events_folder: str :param node_count: The number of nodes in the Big Data pool. :type node_count: int :param library_requirements: Library version requirements :type library_requirements: ~azure.mgmt.synapse.models.LibraryRequirements + :param custom_libraries: List of custom libraries/packages associated with + the spark pool. + :type custom_libraries: list[~azure.mgmt.synapse.models.LibraryInfo] + :param spark_config_properties: Spark configuration file to specify + additional properties + :type spark_config_properties: + ~azure.mgmt.synapse.models.LibraryRequirements :param spark_version: The Apache Spark version. :type spark_version: str :param default_spark_log_folder: The default folder where Spark logs will @@ -306,7 +316,7 @@ class BigDataPoolResourceInfo(TrackedResource): :type default_spark_log_folder: str :param node_size: The level of compute power that each node in the Big Data pool has. 
Possible values include: 'None', 'Small', 'Medium', - 'Large', 'XLarge', 'XXLarge' + 'Large', 'XLarge', 'XXLarge', 'XXXLarge' :type node_size: str or ~azure.mgmt.synapse.models.NodeSize :param node_size_family: The kind of nodes that the Big Data pool provides. Possible values include: 'None', 'MemoryOptimized' @@ -331,25 +341,31 @@ class BigDataPoolResourceInfo(TrackedResource): 'creation_date': {'key': 'properties.creationDate', 'type': 'iso-8601'}, 'auto_pause': {'key': 'properties.autoPause', 'type': 'AutoPauseProperties'}, 'is_compute_isolation_enabled': {'key': 'properties.isComputeIsolationEnabled', 'type': 'bool'}, + 'session_level_packages_enabled': {'key': 'properties.sessionLevelPackagesEnabled', 'type': 'bool'}, 'spark_events_folder': {'key': 'properties.sparkEventsFolder', 'type': 'str'}, 'node_count': {'key': 'properties.nodeCount', 'type': 'int'}, 'library_requirements': {'key': 'properties.libraryRequirements', 'type': 'LibraryRequirements'}, + 'custom_libraries': {'key': 'properties.customLibraries', 'type': '[LibraryInfo]'}, + 'spark_config_properties': {'key': 'properties.sparkConfigProperties', 'type': 'LibraryRequirements'}, 'spark_version': {'key': 'properties.sparkVersion', 'type': 'str'}, 'default_spark_log_folder': {'key': 'properties.defaultSparkLogFolder', 'type': 'str'}, 'node_size': {'key': 'properties.nodeSize', 'type': 'str'}, 'node_size_family': {'key': 'properties.nodeSizeFamily', 'type': 'str'}, } - def __init__(self, *, location: str, tags=None, provisioning_state: str=None, auto_scale=None, creation_date=None, auto_pause=None, is_compute_isolation_enabled: bool=None, spark_events_folder: str=None, node_count: int=None, library_requirements=None, spark_version: str=None, default_spark_log_folder: str=None, node_size=None, node_size_family=None, **kwargs) -> None: + def __init__(self, *, location: str, tags=None, provisioning_state: str=None, auto_scale=None, creation_date=None, auto_pause=None, is_compute_isolation_enabled: 
bool=None, session_level_packages_enabled: bool=None, spark_events_folder: str=None, node_count: int=None, library_requirements=None, custom_libraries=None, spark_config_properties=None, spark_version: str=None, default_spark_log_folder: str=None, node_size=None, node_size_family=None, **kwargs) -> None: super(BigDataPoolResourceInfo, self).__init__(tags=tags, location=location, **kwargs) self.provisioning_state = provisioning_state self.auto_scale = auto_scale self.creation_date = creation_date self.auto_pause = auto_pause self.is_compute_isolation_enabled = is_compute_isolation_enabled + self.session_level_packages_enabled = session_level_packages_enabled self.spark_events_folder = spark_events_folder self.node_count = node_count self.library_requirements = library_requirements + self.custom_libraries = custom_libraries + self.spark_config_properties = spark_config_properties self.spark_version = spark_version self.default_spark_log_folder = default_spark_log_folder self.node_size = node_size @@ -628,6 +644,197 @@ def __init__(self, **kwargs) -> None: super(ProxyResource, self).__init__(**kwargs) +class DataMaskingPolicy(ProxyResource): + """Represents a database data masking policy. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName} + :vartype id: str + :ivar name: The name of the resource + :vartype name: str + :ivar type: The type of the resource. Ex- + Microsoft.Compute/virtualMachines or Microsoft.Storage/storageAccounts. + :vartype type: str + :param data_masking_state: Required. The state of the data masking policy. 
+ Possible values include: 'Disabled', 'Enabled' + :type data_masking_state: str or + ~azure.mgmt.synapse.models.DataMaskingState + :param exempt_principals: The list of the exempt principals. Specifies the + semicolon-separated list of database users for which the data masking + policy does not apply. The specified users receive data results without + masking for all of the database queries. + :type exempt_principals: str + :ivar application_principals: The list of the application principals. This + is a legacy parameter and is no longer used. + :vartype application_principals: str + :ivar masking_level: The masking level. This is a legacy parameter and is + no longer used. + :vartype masking_level: str + :ivar location: The location of the data masking policy. + :vartype location: str + :ivar kind: The kind of data masking policy. Metadata, used for Azure + portal. + :vartype kind: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'data_masking_state': {'required': True}, + 'application_principals': {'readonly': True}, + 'masking_level': {'readonly': True}, + 'location': {'readonly': True}, + 'kind': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'data_masking_state': {'key': 'properties.dataMaskingState', 'type': 'DataMaskingState'}, + 'exempt_principals': {'key': 'properties.exemptPrincipals', 'type': 'str'}, + 'application_principals': {'key': 'properties.applicationPrincipals', 'type': 'str'}, + 'masking_level': {'key': 'properties.maskingLevel', 'type': 'str'}, + 'location': {'key': 'location', 'type': 'str'}, + 'kind': {'key': 'kind', 'type': 'str'}, + } + + def __init__(self, *, data_masking_state, exempt_principals: str=None, **kwargs) -> None: + super(DataMaskingPolicy, self).__init__(**kwargs) + self.data_masking_state = data_masking_state + 
self.exempt_principals = exempt_principals + self.application_principals = None + self.masking_level = None + self.location = None + self.kind = None + + +class DataMaskingRule(ProxyResource): + """Represents a Sql pool data masking rule. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName} + :vartype id: str + :ivar name: The name of the resource + :vartype name: str + :ivar type: The type of the resource. Ex- + Microsoft.Compute/virtualMachines or Microsoft.Storage/storageAccounts. + :vartype type: str + :ivar data_masking_rule_id: The rule Id. + :vartype data_masking_rule_id: str + :param alias_name: The alias name. This is a legacy parameter and is no + longer used. + :type alias_name: str + :param rule_state: The rule state. Used to delete a rule. To delete an + existing rule, specify the schemaName, tableName, columnName, + maskingFunction, and specify ruleState as disabled. However, if the rule + doesn't already exist, the rule will be created with ruleState set to + enabled, regardless of the provided value of ruleState. Possible values + include: 'Disabled', 'Enabled' + :type rule_state: str or ~azure.mgmt.synapse.models.DataMaskingRuleState + :param schema_name: Required. The schema name on which the data masking + rule is applied. + :type schema_name: str + :param table_name: Required. The table name on which the data masking rule + is applied. + :type table_name: str + :param column_name: Required. The column name on which the data masking + rule is applied. + :type column_name: str + :param masking_function: Required. The masking function that is used for + the data masking rule. 
Possible values include: 'Default', 'CCN', 'Email', + 'Number', 'SSN', 'Text' + :type masking_function: str or + ~azure.mgmt.synapse.models.DataMaskingFunction + :param number_from: The numberFrom property of the masking rule. Required + if maskingFunction is set to Number, otherwise this parameter will be + ignored. + :type number_from: str + :param number_to: The numberTo property of the data masking rule. Required + if maskingFunction is set to Number, otherwise this parameter will be + ignored. + :type number_to: str + :param prefix_size: If maskingFunction is set to Text, the number of + characters to show unmasked in the beginning of the string. Otherwise, + this parameter will be ignored. + :type prefix_size: str + :param suffix_size: If maskingFunction is set to Text, the number of + characters to show unmasked at the end of the string. Otherwise, this + parameter will be ignored. + :type suffix_size: str + :param replacement_string: If maskingFunction is set to Text, the + character to use for masking the unexposed part of the string. Otherwise, + this parameter will be ignored. + :type replacement_string: str + :ivar location: The location of the data masking rule. + :vartype location: str + :ivar kind: The kind of Data Masking Rule. Metadata, used for Azure + portal. 
+ :vartype kind: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'data_masking_rule_id': {'readonly': True}, + 'schema_name': {'required': True}, + 'table_name': {'required': True}, + 'column_name': {'required': True}, + 'masking_function': {'required': True}, + 'location': {'readonly': True}, + 'kind': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'data_masking_rule_id': {'key': 'properties.id', 'type': 'str'}, + 'alias_name': {'key': 'properties.aliasName', 'type': 'str'}, + 'rule_state': {'key': 'properties.ruleState', 'type': 'DataMaskingRuleState'}, + 'schema_name': {'key': 'properties.schemaName', 'type': 'str'}, + 'table_name': {'key': 'properties.tableName', 'type': 'str'}, + 'column_name': {'key': 'properties.columnName', 'type': 'str'}, + 'masking_function': {'key': 'properties.maskingFunction', 'type': 'DataMaskingFunction'}, + 'number_from': {'key': 'properties.numberFrom', 'type': 'str'}, + 'number_to': {'key': 'properties.numberTo', 'type': 'str'}, + 'prefix_size': {'key': 'properties.prefixSize', 'type': 'str'}, + 'suffix_size': {'key': 'properties.suffixSize', 'type': 'str'}, + 'replacement_string': {'key': 'properties.replacementString', 'type': 'str'}, + 'location': {'key': 'location', 'type': 'str'}, + 'kind': {'key': 'kind', 'type': 'str'}, + } + + def __init__(self, *, schema_name: str, table_name: str, column_name: str, masking_function, alias_name: str=None, rule_state=None, number_from: str=None, number_to: str=None, prefix_size: str=None, suffix_size: str=None, replacement_string: str=None, **kwargs) -> None: + super(DataMaskingRule, self).__init__(**kwargs) + self.data_masking_rule_id = None + self.alias_name = alias_name + self.rule_state = rule_state + self.schema_name = schema_name + self.table_name = table_name + self.column_name = 
column_name + self.masking_function = masking_function + self.number_from = number_from + self.number_to = number_to + self.prefix_size = prefix_size + self.suffix_size = suffix_size + self.replacement_string = replacement_string + self.location = None + self.kind = None + + class DataWarehouseUserActivities(ProxyResource): """User activities of a data warehouse. @@ -845,6 +1052,177 @@ def __init__(self, **kwargs) -> None: self.additional_info = None +class ExtendedSqlPoolBlobAuditingPolicy(ProxyResource): + """An extended Sql pool blob auditing policy. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName} + :vartype id: str + :ivar name: The name of the resource + :vartype name: str + :ivar type: The type of the resource. Ex- + Microsoft.Compute/virtualMachines or Microsoft.Storage/storageAccounts. + :vartype type: str + :param predicate_expression: Specifies condition of where clause when + creating an audit. + :type predicate_expression: str + :param state: Required. Specifies the state of the policy. If state is + Enabled, storageEndpoint or isAzureMonitorTargetEnabled are required. + Possible values include: 'Enabled', 'Disabled' + :type state: str or ~azure.mgmt.synapse.models.BlobAuditingPolicyState + :param storage_endpoint: Specifies the blob storage endpoint (e.g. + https://MyAccount.blob.core.windows.net). If state is Enabled, + storageEndpoint or isAzureMonitorTargetEnabled is required. + :type storage_endpoint: str + :param storage_account_access_key: Specifies the identifier key of the + auditing storage account. 
+ If state is Enabled and storageEndpoint is specified, not specifying the + storageAccountAccessKey will use SQL server system-assigned managed + identity to access the storage. + Prerequisites for using managed identity authentication: + 1. Assign SQL Server a system-assigned managed identity in Azure Active + Directory (AAD). + 2. Grant SQL Server identity access to the storage account by adding + 'Storage Blob Data Contributor' RBAC role to the server identity. + For more information, see [Auditing to storage using Managed Identity + authentication](https://go.microsoft.com/fwlink/?linkid=2114355) + :type storage_account_access_key: str + :param retention_days: Specifies the number of days to keep in the audit + logs in the storage account. + :type retention_days: int + :param audit_actions_and_groups: Specifies the Actions-Groups and Actions + to audit. + The recommended set of action groups to use is the following combination - + this will audit all the queries and stored procedures executed against the + database, as well as successful and failed logins: + BATCH_COMPLETED_GROUP, + SUCCESSFUL_DATABASE_AUTHENTICATION_GROUP, + FAILED_DATABASE_AUTHENTICATION_GROUP. + This above combination is also the set that is configured by default when + enabling auditing from the Azure portal. + The supported action groups to audit are (note: choose only specific + groups that cover your auditing needs. 
Using unnecessary groups could lead + to very large quantities of audit records): + APPLICATION_ROLE_CHANGE_PASSWORD_GROUP + BACKUP_RESTORE_GROUP + DATABASE_LOGOUT_GROUP + DATABASE_OBJECT_CHANGE_GROUP + DATABASE_OBJECT_OWNERSHIP_CHANGE_GROUP + DATABASE_OBJECT_PERMISSION_CHANGE_GROUP + DATABASE_OPERATION_GROUP + DATABASE_PERMISSION_CHANGE_GROUP + DATABASE_PRINCIPAL_CHANGE_GROUP + DATABASE_PRINCIPAL_IMPERSONATION_GROUP + DATABASE_ROLE_MEMBER_CHANGE_GROUP + FAILED_DATABASE_AUTHENTICATION_GROUP + SCHEMA_OBJECT_ACCESS_GROUP + SCHEMA_OBJECT_CHANGE_GROUP + SCHEMA_OBJECT_OWNERSHIP_CHANGE_GROUP + SCHEMA_OBJECT_PERMISSION_CHANGE_GROUP + SUCCESSFUL_DATABASE_AUTHENTICATION_GROUP + USER_CHANGE_PASSWORD_GROUP + BATCH_STARTED_GROUP + BATCH_COMPLETED_GROUP + These are groups that cover all sql statements and stored procedures + executed against the database, and should not be used in combination with + other groups as this will result in duplicate audit logs. + For more information, see [Database-Level Audit Action + Groups](https://docs.microsoft.com/en-us/sql/relational-databases/security/auditing/sql-server-audit-action-groups-and-actions#database-level-audit-action-groups). + For Database auditing policy, specific Actions can also be specified (note + that Actions cannot be specified for Server auditing policy). The + supported actions to audit are: + SELECT + UPDATE + INSERT + DELETE + EXECUTE + RECEIVE + REFERENCES + The general form for defining an action to be audited is: + {action} ON {object} BY {principal} + Note that {object} in the above format can refer to an object like a + table, view, or stored procedure, or an entire database or schema. For the + latter cases, the forms DATABASE::{db_name} and SCHEMA::{schema_name} are + used, respectively. 
+ For example: + SELECT on dbo.myTable by public + SELECT on DATABASE::myDatabase by public + SELECT on SCHEMA::mySchema by public + For more information, see [Database-Level Audit + Actions](https://docs.microsoft.com/en-us/sql/relational-databases/security/auditing/sql-server-audit-action-groups-and-actions#database-level-audit-actions) + :type audit_actions_and_groups: list[str] + :param storage_account_subscription_id: Specifies the blob storage + subscription Id. + :type storage_account_subscription_id: str + :param is_storage_secondary_key_in_use: Specifies whether + storageAccountAccessKey value is the storage's secondary key. + :type is_storage_secondary_key_in_use: bool + :param is_azure_monitor_target_enabled: Specifies whether audit events are + sent to Azure Monitor. + In order to send the events to Azure Monitor, specify 'state' as 'Enabled' + and 'isAzureMonitorTargetEnabled' as true. + When using REST API to configure auditing, Diagnostic Settings with + 'SQLSecurityAuditEvents' diagnostic logs category on the database should + be also created. + Note that for server level audit you should use the 'master' database as + {databaseName}. + Diagnostic Settings URI format: + PUT + https://management.azure.com/subscriptions/{subscriptionId}/resourceGroups/{resourceGroup}/providers/Microsoft.Sql/servers/{serverName}/databases/{databaseName}/providers/microsoft.insights/diagnosticSettings/{settingsName}?api-version=2017-05-01-preview + For more information, see [Diagnostic Settings REST + API](https://go.microsoft.com/fwlink/?linkid=2033207) + or [Diagnostic Settings + PowerShell](https://go.microsoft.com/fwlink/?linkid=2033043) + :type is_azure_monitor_target_enabled: bool + :param queue_delay_ms: Specifies the amount of time in milliseconds that + can elapse before audit actions are forced to be processed. + The default minimum value is 1000 (1 second). The maximum is + 2,147,483,647. 
+ :type queue_delay_ms: int + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'state': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'predicate_expression': {'key': 'properties.predicateExpression', 'type': 'str'}, + 'state': {'key': 'properties.state', 'type': 'BlobAuditingPolicyState'}, + 'storage_endpoint': {'key': 'properties.storageEndpoint', 'type': 'str'}, + 'storage_account_access_key': {'key': 'properties.storageAccountAccessKey', 'type': 'str'}, + 'retention_days': {'key': 'properties.retentionDays', 'type': 'int'}, + 'audit_actions_and_groups': {'key': 'properties.auditActionsAndGroups', 'type': '[str]'}, + 'storage_account_subscription_id': {'key': 'properties.storageAccountSubscriptionId', 'type': 'str'}, + 'is_storage_secondary_key_in_use': {'key': 'properties.isStorageSecondaryKeyInUse', 'type': 'bool'}, + 'is_azure_monitor_target_enabled': {'key': 'properties.isAzureMonitorTargetEnabled', 'type': 'bool'}, + 'queue_delay_ms': {'key': 'properties.queueDelayMs', 'type': 'int'}, + } + + def __init__(self, *, state, predicate_expression: str=None, storage_endpoint: str=None, storage_account_access_key: str=None, retention_days: int=None, audit_actions_and_groups=None, storage_account_subscription_id: str=None, is_storage_secondary_key_in_use: bool=None, is_azure_monitor_target_enabled: bool=None, queue_delay_ms: int=None, **kwargs) -> None: + super(ExtendedSqlPoolBlobAuditingPolicy, self).__init__(**kwargs) + self.predicate_expression = predicate_expression + self.state = state + self.storage_endpoint = storage_endpoint + self.storage_account_access_key = storage_account_access_key + self.retention_days = retention_days + self.audit_actions_and_groups = audit_actions_and_groups + self.storage_account_subscription_id = storage_account_subscription_id + 
self.is_storage_secondary_key_in_use = is_storage_secondary_key_in_use + self.is_azure_monitor_target_enabled = is_azure_monitor_target_enabled + self.queue_delay_ms = queue_delay_ms + + class GeoBackupPolicy(ProxyResource): """A database geo backup policy. @@ -1669,6 +2047,40 @@ def __init__(self, *, end_ip_address: str=None, start_ip_address: str=None, **kw self.start_ip_address = start_ip_address +class LibraryInfo(Model): + """Information about a library/package created at the workspace level. + + Library/package information of a Big Data pool powered by Apache Spark. + + :param name: Name of the library. + :type name: str + :param path: Storage blob path of library. + :type path: str + :param container_name: Storage blob container name. + :type container_name: str + :param uploaded_timestamp: The last update time of the library. + :type uploaded_timestamp: datetime + :param type: Type of the library. + :type type: str + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'path': {'key': 'path', 'type': 'str'}, + 'container_name': {'key': 'containerName', 'type': 'str'}, + 'uploaded_timestamp': {'key': 'uploadedTimestamp', 'type': 'iso-8601'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, *, name: str=None, path: str=None, container_name: str=None, uploaded_timestamp=None, type: str=None, **kwargs) -> None: + super(LibraryInfo, self).__init__(**kwargs) + self.name = name + self.path = path + self.container_name = container_name + self.uploaded_timestamp = uploaded_timestamp + self.type = type + + class LibraryRequirements(Model): """Spark pool library version requirements. @@ -2189,6 +2601,32 @@ def __init__(self, *, additional_properties=None, **kwargs) -> None: self.type = 'Managed' +class ManagedVirtualNetworkSettings(Model): + """Managed Virtual Network Settings. 
+ + :param prevent_data_exfiltration: Prevent Data Exfiltration + :type prevent_data_exfiltration: bool + :param linked_access_check_on_target_resource: Linked Access Check On + Target Resource + :type linked_access_check_on_target_resource: bool + :param allowed_aad_tenant_ids_for_linking: Allowed Aad Tenant Ids For + Linking + :type allowed_aad_tenant_ids_for_linking: list[str] + """ + + _attribute_map = { + 'prevent_data_exfiltration': {'key': 'preventDataExfiltration', 'type': 'bool'}, + 'linked_access_check_on_target_resource': {'key': 'linkedAccessCheckOnTargetResource', 'type': 'bool'}, + 'allowed_aad_tenant_ids_for_linking': {'key': 'allowedAadTenantIdsForLinking', 'type': '[str]'}, + } + + def __init__(self, *, prevent_data_exfiltration: bool=None, linked_access_check_on_target_resource: bool=None, allowed_aad_tenant_ids_for_linking=None, **kwargs) -> None: + super(ManagedVirtualNetworkSettings, self).__init__(**kwargs) + self.prevent_data_exfiltration = prevent_data_exfiltration + self.linked_access_check_on_target_resource = linked_access_check_on_target_resource + self.allowed_aad_tenant_ids_for_linking = allowed_aad_tenant_ids_for_linking + + class MetadataSyncConfig(ProxyResource): """Metadata sync configuration. 
@@ -5028,6 +5466,9 @@ class Workspace(TrackedResource): list[~azure.mgmt.synapse.models.PrivateEndpointConnection] :ivar extra_properties: Workspace level configs and feature flags :vartype extra_properties: dict[str, object] + :param managed_virtual_network_settings: Managed Virtual Network Settings + :type managed_virtual_network_settings: + ~azure.mgmt.synapse.models.ManagedVirtualNetworkSettings :param identity: Identity of the workspace :type identity: ~azure.mgmt.synapse.models.ManagedIdentity """ @@ -5057,10 +5498,11 @@ class Workspace(TrackedResource): 'managed_virtual_network': {'key': 'properties.managedVirtualNetwork', 'type': 'str'}, 'private_endpoint_connections': {'key': 'properties.privateEndpointConnections', 'type': '[PrivateEndpointConnection]'}, 'extra_properties': {'key': 'properties.extraProperties', 'type': '{object}'}, + 'managed_virtual_network_settings': {'key': 'properties.managedVirtualNetworkSettings', 'type': 'ManagedVirtualNetworkSettings'}, 'identity': {'key': 'identity', 'type': 'ManagedIdentity'}, } - def __init__(self, *, location: str, tags=None, default_data_lake_storage=None, sql_administrator_login_password: str=None, managed_resource_group_name: str=None, sql_administrator_login: str=None, virtual_network_profile=None, connectivity_endpoints=None, managed_virtual_network: str=None, private_endpoint_connections=None, identity=None, **kwargs) -> None: + def __init__(self, *, location: str, tags=None, default_data_lake_storage=None, sql_administrator_login_password: str=None, managed_resource_group_name: str=None, sql_administrator_login: str=None, virtual_network_profile=None, connectivity_endpoints=None, managed_virtual_network: str=None, private_endpoint_connections=None, managed_virtual_network_settings=None, identity=None, **kwargs) -> None: super(Workspace, self).__init__(tags=tags, location=location, **kwargs) self.default_data_lake_storage = default_data_lake_storage self.sql_administrator_login_password = 
sql_administrator_login_password @@ -5072,6 +5514,7 @@ def __init__(self, *, location: str, tags=None, default_data_lake_storage=None, self.managed_virtual_network = managed_virtual_network self.private_endpoint_connections = private_endpoint_connections self.extra_properties = None + self.managed_virtual_network_settings = managed_virtual_network_settings self.identity = identity @@ -5134,6 +5577,9 @@ class WorkspacePatchInfo(Model): :type tags: dict[str, str] :param identity: The identity of the workspace :type identity: ~azure.mgmt.synapse.models.ManagedIdentity + :param managed_virtual_network_settings: Managed Virtual Network Settings + :type managed_virtual_network_settings: + ~azure.mgmt.synapse.models.WorkspacePatchInfoManagedVirtualNetworkSettings :param sql_administrator_login_password: SQL administrator login password :type sql_administrator_login_password: str :ivar provisioning_state: Resource provisioning state @@ -5147,13 +5593,41 @@ class WorkspacePatchInfo(Model): _attribute_map = { 'tags': {'key': 'tags', 'type': '{str}'}, 'identity': {'key': 'identity', 'type': 'ManagedIdentity'}, + 'managed_virtual_network_settings': {'key': 'managedVirtualNetworkSettings', 'type': 'WorkspacePatchInfoManagedVirtualNetworkSettings'}, 'sql_administrator_login_password': {'key': 'properties.sqlAdministratorLoginPassword', 'type': 'str'}, 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, } - def __init__(self, *, tags=None, identity=None, sql_administrator_login_password: str=None, **kwargs) -> None: + def __init__(self, *, tags=None, identity=None, managed_virtual_network_settings=None, sql_administrator_login_password: str=None, **kwargs) -> None: super(WorkspacePatchInfo, self).__init__(**kwargs) self.tags = tags self.identity = identity + self.managed_virtual_network_settings = managed_virtual_network_settings self.sql_administrator_login_password = sql_administrator_login_password self.provisioning_state = None + + +class 
WorkspacePatchInfoManagedVirtualNetworkSettings(Model): + """Managed Virtual Network Settings. + + :param prevent_data_exfiltration: Prevent Data Exfiltration + :type prevent_data_exfiltration: bool + :param linked_access_check_on_target_resource: Linked Access Check On + Target Resource + :type linked_access_check_on_target_resource: bool + :param allowed_aad_tenant_ids_for_linking: Allowed Aad Tenant Ids For + Linking + :type allowed_aad_tenant_ids_for_linking: list[str] + """ + + _attribute_map = { + 'prevent_data_exfiltration': {'key': 'preventDataExfiltration', 'type': 'bool'}, + 'linked_access_check_on_target_resource': {'key': 'linkedAccessCheckOnTargetResource', 'type': 'bool'}, + 'allowed_aad_tenant_ids_for_linking': {'key': 'allowedAadTenantIdsForLinking', 'type': '[str]'}, + } + + def __init__(self, *, prevent_data_exfiltration: bool=None, linked_access_check_on_target_resource: bool=None, allowed_aad_tenant_ids_for_linking=None, **kwargs) -> None: + super(WorkspacePatchInfoManagedVirtualNetworkSettings, self).__init__(**kwargs) + self.prevent_data_exfiltration = prevent_data_exfiltration + self.linked_access_check_on_target_resource = linked_access_check_on_target_resource + self.allowed_aad_tenant_ids_for_linking = allowed_aad_tenant_ids_for_linking diff --git a/sdk/synapse/azure-mgmt-synapse/azure/mgmt/synapse/models/_paged_models.py b/sdk/synapse/azure-mgmt-synapse/azure/mgmt/synapse/models/_paged_models.py index 707152412f3b..9270afdc2ca7 100644 --- a/sdk/synapse/azure-mgmt-synapse/azure/mgmt/synapse/models/_paged_models.py +++ b/sdk/synapse/azure-mgmt-synapse/azure/mgmt/synapse/models/_paged_models.py @@ -77,6 +77,19 @@ class ReplicationLinkPaged(Paged): def __init__(self, *args, **kwargs): super(ReplicationLinkPaged, self).__init__(*args, **kwargs) +class SqlPoolBlobAuditingPolicyPaged(Paged): + """ + A paging container for iterating over a list of :class:`SqlPoolBlobAuditingPolicy ` object + """ + + _attribute_map = { + 'next_link': {'key': 
'nextLink', 'type': 'str'}, + 'current_page': {'key': 'value', 'type': '[SqlPoolBlobAuditingPolicy]'} + } + + def __init__(self, *args, **kwargs): + + super(SqlPoolBlobAuditingPolicyPaged, self).__init__(*args, **kwargs) class SqlPoolOperationPaged(Paged): """ A paging container for iterating over a list of :class:`SqlPoolOperation ` object @@ -181,6 +194,32 @@ class VulnerabilityAssessmentScanRecordPaged(Paged): def __init__(self, *args, **kwargs): super(VulnerabilityAssessmentScanRecordPaged, self).__init__(*args, **kwargs) +class ExtendedSqlPoolBlobAuditingPolicyPaged(Paged): + """ + A paging container for iterating over a list of :class:`ExtendedSqlPoolBlobAuditingPolicy ` object + """ + + _attribute_map = { + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'current_page': {'key': 'value', 'type': '[ExtendedSqlPoolBlobAuditingPolicy]'} + } + + def __init__(self, *args, **kwargs): + + super(ExtendedSqlPoolBlobAuditingPolicyPaged, self).__init__(*args, **kwargs) +class DataMaskingRulePaged(Paged): + """ + A paging container for iterating over a list of :class:`DataMaskingRule ` object + """ + + _attribute_map = { + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'current_page': {'key': 'value', 'type': '[DataMaskingRule]'} + } + + def __init__(self, *args, **kwargs): + + super(DataMaskingRulePaged, self).__init__(*args, **kwargs) class WorkspacePaged(Paged): """ A paging container for iterating over a list of :class:`Workspace ` object diff --git a/sdk/synapse/azure-mgmt-synapse/azure/mgmt/synapse/models/_synapse_management_client_enums.py b/sdk/synapse/azure-mgmt-synapse/azure/mgmt/synapse/models/_synapse_management_client_enums.py index 1f354d19a474..79dd8590ba62 100644 --- a/sdk/synapse/azure-mgmt-synapse/azure/mgmt/synapse/models/_synapse_management_client_enums.py +++ b/sdk/synapse/azure-mgmt-synapse/azure/mgmt/synapse/models/_synapse_management_client_enums.py @@ -20,6 +20,7 @@ class NodeSize(str, Enum): large = "Large" xlarge = "XLarge" xx_large = 
"XXLarge" + xxx_large = "XXXLarge" class NodeSizeFamily(str, Enum): @@ -188,6 +189,28 @@ class SecurityAlertPolicyState(str, Enum): disabled = "Disabled" +class DataMaskingState(str, Enum): + + disabled = "Disabled" + enabled = "Enabled" + + +class DataMaskingRuleState(str, Enum): + + disabled = "Disabled" + enabled = "Enabled" + + +class DataMaskingFunction(str, Enum): + + default = "Default" + ccn = "CCN" + email = "Email" + number = "Number" + ssn = "SSN" + text = "Text" + + class ResourceIdentityType(str, Enum): none = "None" @@ -300,6 +323,12 @@ class SsisObjectMetadataType(str, Enum): environment = "Environment" +class SensitivityLabelSource(str, Enum): + + current = "current" + recommended = "recommended" + + class VulnerabilityAssessmentPolicyBaselineName(str, Enum): master = "master" diff --git a/sdk/synapse/azure-mgmt-synapse/azure/mgmt/synapse/operations/__init__.py b/sdk/synapse/azure-mgmt-synapse/azure/mgmt/synapse/operations/__init__.py index 5c164434e399..b94fff90f4e8 100644 --- a/sdk/synapse/azure-mgmt-synapse/azure/mgmt/synapse/operations/__init__.py +++ b/sdk/synapse/azure-mgmt-synapse/azure/mgmt/synapse/operations/__init__.py @@ -32,6 +32,10 @@ from ._sql_pool_vulnerability_assessment_scans_operations import SqlPoolVulnerabilityAssessmentScansOperations from ._sql_pool_security_alert_policies_operations import SqlPoolSecurityAlertPoliciesOperations from ._sql_pool_vulnerability_assessment_rule_baselines_operations import SqlPoolVulnerabilityAssessmentRuleBaselinesOperations +from ._extended_sql_pool_blob_auditing_policies_operations import ExtendedSqlPoolBlobAuditingPoliciesOperations +from ._data_masking_policies_operations import DataMaskingPoliciesOperations +from ._data_masking_rules_operations import DataMaskingRulesOperations +from ._sql_pool_columns_operations import SqlPoolColumnsOperations from ._workspaces_operations import WorkspacesOperations from ._workspace_aad_admins_operations import WorkspaceAadAdminsOperations from 
._workspace_managed_identity_sql_control_settings_operations import WorkspaceManagedIdentitySqlControlSettingsOperations @@ -72,6 +76,10 @@ 'SqlPoolVulnerabilityAssessmentScansOperations', 'SqlPoolSecurityAlertPoliciesOperations', 'SqlPoolVulnerabilityAssessmentRuleBaselinesOperations', + 'ExtendedSqlPoolBlobAuditingPoliciesOperations', + 'DataMaskingPoliciesOperations', + 'DataMaskingRulesOperations', + 'SqlPoolColumnsOperations', 'WorkspacesOperations', 'WorkspaceAadAdminsOperations', 'WorkspaceManagedIdentitySqlControlSettingsOperations', diff --git a/sdk/synapse/azure-mgmt-synapse/azure/mgmt/synapse/operations/_data_masking_policies_operations.py b/sdk/synapse/azure-mgmt-synapse/azure/mgmt/synapse/operations/_data_masking_policies_operations.py new file mode 100644 index 000000000000..36324449ef68 --- /dev/null +++ b/sdk/synapse/azure-mgmt-synapse/azure/mgmt/synapse/operations/_data_masking_policies_operations.py @@ -0,0 +1,189 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +import uuid +from msrest.pipeline import ClientRawResponse +from msrestazure.azure_exceptions import CloudError + +from .. import models + + +class DataMaskingPoliciesOperations(object): + """DataMaskingPoliciesOperations operations. + + You should not instantiate directly this class, but create a Client instance that will create it for you and attach it as attribute. + + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. 
+ :param deserializer: An object model deserializer. + :ivar api_version: The API version to use for this operation. Constant value: "2019-06-01-preview". + :ivar data_masking_policy_name: The name of the data masking policy for which the masking rule applies. Constant value: "Default". + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self.api_version = "2019-06-01-preview" + self.data_masking_policy_name = "Default" + + self.config = config + + def create_or_update( + self, resource_group_name, workspace_name, sql_pool_name, data_masking_state, exempt_principals=None, custom_headers=None, raw=False, **operation_config): + """Creates or updates a Sql pool data masking policy. + + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace + :type workspace_name: str + :param sql_pool_name: SQL pool name + :type sql_pool_name: str + :param data_masking_state: The state of the data masking policy. + Possible values include: 'Disabled', 'Enabled' + :type data_masking_state: str or + ~azure.mgmt.synapse.models.DataMaskingState + :param exempt_principals: The list of the exempt principals. Specifies + the semicolon-separated list of database users for which the data + masking policy does not apply. The specified users receive data + results without masking for all of the database queries. + :type exempt_principals: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: DataMaskingPolicy or ClientRawResponse if raw=true + :rtype: ~azure.mgmt.synapse.models.DataMaskingPolicy or + ~msrest.pipeline.ClientRawResponse + :raises: :class:`CloudError` + """ + parameters = models.DataMaskingPolicy(data_masking_state=data_masking_state, exempt_principals=exempt_principals) + + # Construct URL + url = self.create_or_update.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), + 'sqlPoolName': self._serialize.url("sql_pool_name", sql_pool_name, 'str'), + 'dataMaskingPolicyName': self._serialize.url("self.data_masking_policy_name", self.data_masking_policy_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + body_content = self._serialize.body(parameters, 'DataMaskingPolicy') + + # Construct and send request + request = self._client.put(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) + + 
if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('DataMaskingPolicy', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Synapse/workspaces/{workspaceName}/sqlPools/{sqlPoolName}/dataMaskingPolicies/{dataMaskingPolicyName}'} + + def get( + self, resource_group_name, workspace_name, sql_pool_name, custom_headers=None, raw=False, **operation_config): + """Gets a Sql pool data masking policy. + + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace + :type workspace_name: str + :param sql_pool_name: SQL pool name + :type sql_pool_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: DataMaskingPolicy or ClientRawResponse if raw=true + :rtype: ~azure.mgmt.synapse.models.DataMaskingPolicy or + ~msrest.pipeline.ClientRawResponse + :raises: :class:`CloudError` + """ + # Construct URL + url = self.get.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), + 'sqlPoolName': self._serialize.url("sql_pool_name", sql_pool_name, 'str'), + 'dataMaskingPolicyName': self._serialize.url("self.data_masking_policy_name", self.data_masking_policy_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('DataMaskingPolicy', response) + + if raw: + client_raw_response = 
ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Synapse/workspaces/{workspaceName}/sqlPools/{sqlPoolName}/dataMaskingPolicies/{dataMaskingPolicyName}'} diff --git a/sdk/synapse/azure-mgmt-synapse/azure/mgmt/synapse/operations/_data_masking_rules_operations.py b/sdk/synapse/azure-mgmt-synapse/azure/mgmt/synapse/operations/_data_masking_rules_operations.py new file mode 100644 index 000000000000..29593198a344 --- /dev/null +++ b/sdk/synapse/azure-mgmt-synapse/azure/mgmt/synapse/operations/_data_masking_rules_operations.py @@ -0,0 +1,197 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +import uuid +from msrest.pipeline import ClientRawResponse +from msrestazure.azure_exceptions import CloudError + +from .. import models + + +class DataMaskingRulesOperations(object): + """DataMaskingRulesOperations operations. + + You should not instantiate directly this class, but create a Client instance that will create it for you and attach it as attribute. + + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + :ivar api_version: The API version to use for this operation. Constant value: "2019-06-01-preview". + :ivar data_masking_policy_name: The name of the data masking policy for which the masking rule applies. Constant value: "Default". 
+ """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self.api_version = "2019-06-01-preview" + self.data_masking_policy_name = "Default" + + self.config = config + + def create_or_update( + self, resource_group_name, workspace_name, sql_pool_name, data_masking_rule_name, parameters, custom_headers=None, raw=False, **operation_config): + """Creates or updates a Sql pool data masking rule. + + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace + :type workspace_name: str + :param sql_pool_name: SQL pool name + :type sql_pool_name: str + :param data_masking_rule_name: The name of the data masking rule. + :type data_masking_rule_name: str + :param parameters: The required parameters for creating or updating a + data masking rule. + :type parameters: ~azure.mgmt.synapse.models.DataMaskingRule + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: DataMaskingRule or ClientRawResponse if raw=true + :rtype: ~azure.mgmt.synapse.models.DataMaskingRule or + ~msrest.pipeline.ClientRawResponse + :raises: :class:`CloudError` + """ + # Construct URL + url = self.create_or_update.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), + 'sqlPoolName': self._serialize.url("sql_pool_name", sql_pool_name, 'str'), + 'dataMaskingPolicyName': self._serialize.url("self.data_masking_policy_name", self.data_masking_policy_name, 'str'), + 'dataMaskingRuleName': self._serialize.url("data_masking_rule_name", data_masking_rule_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + body_content = self._serialize.body(parameters, 'DataMaskingRule') + + # Construct and send request + request = self._client.put(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) + + if 
response.status_code not in [200, 201]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('DataMaskingRule', response) + if response.status_code == 201: + deserialized = self._deserialize('DataMaskingRule', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Synapse/workspaces/{workspaceName}/sqlPools/{sqlPoolName}/dataMaskingPolicies/{dataMaskingPolicyName}/rules/{dataMaskingRuleName}'} + + def list_by_sql_pool( + self, resource_group_name, workspace_name, sql_pool_name, custom_headers=None, raw=False, **operation_config): + """Gets a list of Sql pool data masking rules. + + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace + :type workspace_name: str + :param sql_pool_name: SQL pool name + :type sql_pool_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: An iterator like instance of DataMaskingRule + :rtype: + ~azure.mgmt.synapse.models.DataMaskingRulePaged[~azure.mgmt.synapse.models.DataMaskingRule] + :raises: :class:`CloudError` + """ + def prepare_request(next_link=None): + if not next_link: + # Construct URL + url = self.list_by_sql_pool.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), + 'sqlPoolName': self._serialize.url("sql_pool_name", sql_pool_name, 'str'), + 'dataMaskingPolicyName': self._serialize.url("self.data_masking_policy_name", self.data_masking_policy_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + else: + url = next_link + query_parameters = {} + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + return request + + def internal_paging(next_link=None): + request = prepare_request(next_link) + + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + 
exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + return response + + # Deserialize response + header_dict = None + if raw: + header_dict = {} + deserialized = models.DataMaskingRulePaged(internal_paging, self._deserialize.dependencies, header_dict) + + return deserialized + list_by_sql_pool.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Synapse/workspaces/{workspaceName}/sqlPools/{sqlPoolName}/dataMaskingPolicies/{dataMaskingPolicyName}/rules'} diff --git a/sdk/synapse/azure-mgmt-synapse/azure/mgmt/synapse/operations/_extended_sql_pool_blob_auditing_policies_operations.py b/sdk/synapse/azure-mgmt-synapse/azure/mgmt/synapse/operations/_extended_sql_pool_blob_auditing_policies_operations.py new file mode 100644 index 000000000000..9e7ddf27809f --- /dev/null +++ b/sdk/synapse/azure-mgmt-synapse/azure/mgmt/synapse/operations/_extended_sql_pool_blob_auditing_policies_operations.py @@ -0,0 +1,262 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +import uuid +from msrest.pipeline import ClientRawResponse +from msrestazure.azure_exceptions import CloudError + +from .. import models + + +class ExtendedSqlPoolBlobAuditingPoliciesOperations(object): + """ExtendedSqlPoolBlobAuditingPoliciesOperations operations. + + You should not instantiate directly this class, but create a Client instance that will create it for you and attach it as attribute. + + :param client: Client for service requests. + :param config: Configuration of service client. 
+ :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + :ivar api_version: The API version to use for this operation. Constant value: "2019-06-01-preview". + :ivar blob_auditing_policy_name: The name of the blob auditing policy. Constant value: "default". + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self.api_version = "2019-06-01-preview" + self.blob_auditing_policy_name = "default" + + self.config = config + + def get( + self, resource_group_name, workspace_name, sql_pool_name, custom_headers=None, raw=False, **operation_config): + """Gets an extended Sql pool's blob auditing policy. + + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace + :type workspace_name: str + :param sql_pool_name: SQL pool name + :type sql_pool_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: ExtendedSqlPoolBlobAuditingPolicy or ClientRawResponse if + raw=true + :rtype: ~azure.mgmt.synapse.models.ExtendedSqlPoolBlobAuditingPolicy + or ~msrest.pipeline.ClientRawResponse + :raises: :class:`CloudError` + """ + # Construct URL + url = self.get.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), + 'sqlPoolName': self._serialize.url("sql_pool_name", sql_pool_name, 'str'), + 'blobAuditingPolicyName': self._serialize.url("self.blob_auditing_policy_name", self.blob_auditing_policy_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('ExtendedSqlPoolBlobAuditingPolicy', 
response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Synapse/workspaces/{workspaceName}/sqlPools/{sqlPoolName}/extendedAuditingSettings/{blobAuditingPolicyName}'} + + def create_or_update( + self, resource_group_name, workspace_name, sql_pool_name, parameters, custom_headers=None, raw=False, **operation_config): + """Creates or updates an extended Sql pool's blob auditing policy. + + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace + :type workspace_name: str + :param sql_pool_name: SQL pool name + :type sql_pool_name: str + :param parameters: The extended Sql pool blob auditing policy. + :type parameters: + ~azure.mgmt.synapse.models.ExtendedSqlPoolBlobAuditingPolicy + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: ExtendedSqlPoolBlobAuditingPolicy or ClientRawResponse if + raw=true + :rtype: ~azure.mgmt.synapse.models.ExtendedSqlPoolBlobAuditingPolicy + or ~msrest.pipeline.ClientRawResponse + :raises: :class:`CloudError` + """ + # Construct URL + url = self.create_or_update.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), + 'sqlPoolName': self._serialize.url("sql_pool_name", sql_pool_name, 'str'), + 'blobAuditingPolicyName': self._serialize.url("self.blob_auditing_policy_name", self.blob_auditing_policy_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + body_content = self._serialize.body(parameters, 'ExtendedSqlPoolBlobAuditingPolicy') + + # Construct and send request + request = self._client.put(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200, 201]: + exp = 
CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('ExtendedSqlPoolBlobAuditingPolicy', response) + if response.status_code == 201: + deserialized = self._deserialize('ExtendedSqlPoolBlobAuditingPolicy', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Synapse/workspaces/{workspaceName}/sqlPools/{sqlPoolName}/extendedAuditingSettings/{blobAuditingPolicyName}'} + + def list_by_sql_pool( + self, resource_group_name, workspace_name, sql_pool_name, custom_headers=None, raw=False, **operation_config): + """Lists extended auditing settings of a Sql pool. + + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace + :type workspace_name: str + :param sql_pool_name: SQL pool name + :type sql_pool_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: An iterator like instance of + ExtendedSqlPoolBlobAuditingPolicy + :rtype: + ~azure.mgmt.synapse.models.ExtendedSqlPoolBlobAuditingPolicyPaged[~azure.mgmt.synapse.models.ExtendedSqlPoolBlobAuditingPolicy] + :raises: :class:`CloudError` + """ + def prepare_request(next_link=None): + if not next_link: + # Construct URL + url = self.list_by_sql_pool.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), + 'sqlPoolName': self._serialize.url("sql_pool_name", sql_pool_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + else: + url = next_link + query_parameters = {} + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + return request + + def internal_paging(next_link=None): + request = prepare_request(next_link) + + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + 
return response + + # Deserialize response + header_dict = None + if raw: + header_dict = {} + deserialized = models.ExtendedSqlPoolBlobAuditingPolicyPaged(internal_paging, self._deserialize.dependencies, header_dict) + + return deserialized + list_by_sql_pool.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Synapse/workspaces/{workspaceName}/sqlPools/{sqlPoolName}/extendedAuditingSettings'} diff --git a/sdk/synapse/azure-mgmt-synapse/azure/mgmt/synapse/operations/_sql_pool_blob_auditing_policies_operations.py b/sdk/synapse/azure-mgmt-synapse/azure/mgmt/synapse/operations/_sql_pool_blob_auditing_policies_operations.py index 47b9ce352c24..7912ae9c2675 100644 --- a/sdk/synapse/azure-mgmt-synapse/azure/mgmt/synapse/operations/_sql_pool_blob_auditing_policies_operations.py +++ b/sdk/synapse/azure-mgmt-synapse/azure/mgmt/synapse/operations/_sql_pool_blob_auditing_policies_operations.py @@ -184,3 +184,79 @@ def create_or_update( return deserialized create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Synapse/workspaces/{workspaceName}/sqlPools/{sqlPoolName}/auditingSettings/{blobAuditingPolicyName}'} + + def list_by_sql_pool( + self, resource_group_name, workspace_name, sql_pool_name, custom_headers=None, raw=False, **operation_config): + """Lists auditing settings of a Sql pool. + + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace + :type workspace_name: str + :param sql_pool_name: SQL pool name + :type sql_pool_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: An iterator like instance of SqlPoolBlobAuditingPolicy + :rtype: + ~azure.mgmt.synapse.models.SqlPoolBlobAuditingPolicyPaged[~azure.mgmt.synapse.models.SqlPoolBlobAuditingPolicy] + :raises: :class:`CloudError` + """ + def prepare_request(next_link=None): + if not next_link: + # Construct URL + url = self.list_by_sql_pool.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), + 'sqlPoolName': self._serialize.url("sql_pool_name", sql_pool_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + else: + url = next_link + query_parameters = {} + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + return request + + def internal_paging(next_link=None): + request = prepare_request(next_link) + + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + return response + + # 
Deserialize response + header_dict = None + if raw: + header_dict = {} + deserialized = models.SqlPoolBlobAuditingPolicyPaged(internal_paging, self._deserialize.dependencies, header_dict) + + return deserialized + list_by_sql_pool.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Synapse/workspaces/{workspaceName}/sqlPools/{sqlPoolName}/auditingSettings'} diff --git a/sdk/synapse/azure-mgmt-synapse/azure/mgmt/synapse/operations/_sql_pool_columns_operations.py b/sdk/synapse/azure-mgmt-synapse/azure/mgmt/synapse/operations/_sql_pool_columns_operations.py new file mode 100644 index 000000000000..fa04c3068dd0 --- /dev/null +++ b/sdk/synapse/azure-mgmt-synapse/azure/mgmt/synapse/operations/_sql_pool_columns_operations.py @@ -0,0 +1,114 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +import uuid +from msrest.pipeline import ClientRawResponse +from msrestazure.azure_exceptions import CloudError + +from .. import models + + +class SqlPoolColumnsOperations(object): + """SqlPoolColumnsOperations operations. + + You should not instantiate directly this class, but create a Client instance that will create it for you and attach it as attribute. + + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + :ivar api_version: The API version to use for this operation. Constant value: "2019-06-01-preview". 
+ """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self.api_version = "2019-06-01-preview" + + self.config = config + + def get( + self, resource_group_name, workspace_name, sql_pool_name, schema_name, table_name, column_name, custom_headers=None, raw=False, **operation_config): + """Get Sql pool column. + + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace + :type workspace_name: str + :param sql_pool_name: SQL pool name + :type sql_pool_name: str + :param schema_name: The name of the schema. + :type schema_name: str + :param table_name: The name of the table. + :type table_name: str + :param column_name: The name of the column. + :type column_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: SqlPoolColumn or ClientRawResponse if raw=true + :rtype: ~azure.mgmt.synapse.models.SqlPoolColumn or + ~msrest.pipeline.ClientRawResponse + :raises: :class:`CloudError` + """ + # Construct URL + url = self.get.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), + 'sqlPoolName': self._serialize.url("sql_pool_name", sql_pool_name, 'str'), + 'schemaName': self._serialize.url("schema_name", schema_name, 'str'), + 'tableName': self._serialize.url("table_name", table_name, 'str'), + 'columnName': self._serialize.url("column_name", column_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + if response.status_code == 200: + deserialized = 
self._deserialize('SqlPoolColumn', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Synapse/workspaces/{workspaceName}/sqlPools/{sqlPoolName}/schemas/{schemaName}/tables/{tableName}/columns/{columnName}'} diff --git a/sdk/synapse/azure-mgmt-synapse/azure/mgmt/synapse/operations/_sql_pool_schemas_operations.py b/sdk/synapse/azure-mgmt-synapse/azure/mgmt/synapse/operations/_sql_pool_schemas_operations.py index 494a8374b914..cdebb2b74a23 100644 --- a/sdk/synapse/azure-mgmt-synapse/azure/mgmt/synapse/operations/_sql_pool_schemas_operations.py +++ b/sdk/synapse/azure-mgmt-synapse/azure/mgmt/synapse/operations/_sql_pool_schemas_operations.py @@ -121,3 +121,71 @@ def internal_paging(next_link=None): return deserialized list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Synapse/workspaces/{workspaceName}/sqlPools/{sqlPoolName}/schemas'} + + def get( + self, resource_group_name, workspace_name, sql_pool_name, schema_name, custom_headers=None, raw=False, **operation_config): + """Get Sql Pool schema. + + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace + :type workspace_name: str + :param sql_pool_name: SQL pool name + :type sql_pool_name: str + :param schema_name: The name of the schema. + :type schema_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: SqlPoolSchema or ClientRawResponse if raw=true + :rtype: ~azure.mgmt.synapse.models.SqlPoolSchema or + ~msrest.pipeline.ClientRawResponse + :raises: :class:`CloudError` + """ + # Construct URL + url = self.get.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), + 'sqlPoolName': self._serialize.url("sql_pool_name", sql_pool_name, 'str'), + 'schemaName': self._serialize.url("schema_name", schema_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('SqlPoolSchema', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return 
client_raw_response + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Synapse/workspaces/{workspaceName}/sqlPools/{sqlPoolName}/schemas/{schemaName}'} diff --git a/sdk/synapse/azure-mgmt-synapse/azure/mgmt/synapse/operations/_sql_pool_sensitivity_labels_operations.py b/sdk/synapse/azure-mgmt-synapse/azure/mgmt/synapse/operations/_sql_pool_sensitivity_labels_operations.py index 22ac677faa4c..61feb99672a3 100644 --- a/sdk/synapse/azure-mgmt-synapse/azure/mgmt/synapse/operations/_sql_pool_sensitivity_labels_operations.py +++ b/sdk/synapse/azure-mgmt-synapse/azure/mgmt/synapse/operations/_sql_pool_sensitivity_labels_operations.py @@ -370,6 +370,85 @@ def delete( return client_raw_response delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Synapse/workspaces/{workspaceName}/sqlPools/{sqlPoolName}/schemas/{schemaName}/tables/{tableName}/columns/{columnName}/sensitivityLabels/{sensitivityLabelSource}'} + def get( + self, resource_group_name, workspace_name, sql_pool_name, schema_name, table_name, column_name, sensitivity_label_source, custom_headers=None, raw=False, **operation_config): + """Gets the sensitivity label of a given column. + + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace + :type workspace_name: str + :param sql_pool_name: SQL pool name + :type sql_pool_name: str + :param schema_name: The name of the schema. + :type schema_name: str + :param table_name: The name of the table. + :type table_name: str + :param column_name: The name of the column. + :type column_name: str + :param sensitivity_label_source: The source of the sensitivity label. 
+ Possible values include: 'current', 'recommended' + :type sensitivity_label_source: str or + ~azure.mgmt.synapse.models.SensitivityLabelSource + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: SensitivityLabel or ClientRawResponse if raw=true + :rtype: ~azure.mgmt.synapse.models.SensitivityLabel or + ~msrest.pipeline.ClientRawResponse + :raises: :class:`CloudError` + """ + # Construct URL + url = self.get.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), + 'sqlPoolName': self._serialize.url("sql_pool_name", sql_pool_name, 'str'), + 'schemaName': self._serialize.url("schema_name", schema_name, 'str'), + 'tableName': self._serialize.url("table_name", table_name, 'str'), + 'columnName': self._serialize.url("column_name", column_name, 'str'), + 'sensitivityLabelSource': self._serialize.url("sensitivity_label_source", sensitivity_label_source, 'SensitivityLabelSource') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + 
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('SensitivityLabel', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Synapse/workspaces/{workspaceName}/sqlPools/{sqlPoolName}/schemas/{schemaName}/tables/{tableName}/columns/{columnName}/sensitivityLabels/{sensitivityLabelSource}'} + def enable_recommendation( self, resource_group_name, workspace_name, sql_pool_name, schema_name, table_name, column_name, custom_headers=None, raw=False, **operation_config): """Enables sensitivity recommendations on a given column (recommendations diff --git a/sdk/synapse/azure-mgmt-synapse/azure/mgmt/synapse/operations/_sql_pool_tables_operations.py b/sdk/synapse/azure-mgmt-synapse/azure/mgmt/synapse/operations/_sql_pool_tables_operations.py index b3c976f2f37e..632988a91303 100644 --- a/sdk/synapse/azure-mgmt-synapse/azure/mgmt/synapse/operations/_sql_pool_tables_operations.py +++ b/sdk/synapse/azure-mgmt-synapse/azure/mgmt/synapse/operations/_sql_pool_tables_operations.py @@ -124,3 +124,74 @@ def internal_paging(next_link=None): return deserialized list_by_schema.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Synapse/workspaces/{workspaceName}/sqlPools/{sqlPoolName}/schemas/{schemaName}/tables'} + + def get( + self, resource_group_name, 
workspace_name, sql_pool_name, schema_name, table_name, custom_headers=None, raw=False, **operation_config): + """Get Sql pool table. + + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace + :type workspace_name: str + :param sql_pool_name: SQL pool name + :type sql_pool_name: str + :param schema_name: The name of the schema. + :type schema_name: str + :param table_name: The name of the table. + :type table_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: SqlPoolTable or ClientRawResponse if raw=true + :rtype: ~azure.mgmt.synapse.models.SqlPoolTable or + ~msrest.pipeline.ClientRawResponse + :raises: :class:`CloudError` + """ + # Construct URL + url = self.get.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), + 'sqlPoolName': self._serialize.url("sql_pool_name", sql_pool_name, 'str'), + 'schemaName': self._serialize.url("schema_name", schema_name, 'str'), + 'tableName': self._serialize.url("table_name", table_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + 
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('SqlPoolTable', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Synapse/workspaces/{workspaceName}/sqlPools/{sqlPoolName}/schemas/{schemaName}/tables/{tableName}'} diff --git a/sdk/synapse/azure-mgmt-synapse/azure/mgmt/synapse/operations/_sql_pool_vulnerability_assessment_rule_baselines_operations.py b/sdk/synapse/azure-mgmt-synapse/azure/mgmt/synapse/operations/_sql_pool_vulnerability_assessment_rule_baselines_operations.py index d032c532c8d2..0e8cd892dea7 100644 --- a/sdk/synapse/azure-mgmt-synapse/azure/mgmt/synapse/operations/_sql_pool_vulnerability_assessment_rule_baselines_operations.py +++ b/sdk/synapse/azure-mgmt-synapse/azure/mgmt/synapse/operations/_sql_pool_vulnerability_assessment_rule_baselines_operations.py @@ -195,3 +195,81 @@ def delete( client_raw_response = ClientRawResponse(None, response) return client_raw_response delete.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Synapse/workspaces/{workspaceName}/sqlPools/{sqlPoolName}/vulnerabilityAssessments/{vulnerabilityAssessmentName}/rules/{ruleId}/baselines/{baselineName}'} + + def get( + self, resource_group_name, workspace_name, sql_pool_name, rule_id, baseline_name, custom_headers=None, raw=False, **operation_config): + """Gets a SqlPool's vulnerability assessment rule baseline. + + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace + :type workspace_name: str + :param sql_pool_name: SQL pool name + :type sql_pool_name: str + :param rule_id: The vulnerability assessment rule ID. + :type rule_id: str + :param baseline_name: The name of the vulnerability assessment rule + baseline (default implies a baseline on a Sql pool level rule and + master for server level rule). Possible values include: 'master', + 'default' + :type baseline_name: str or + ~azure.mgmt.synapse.models.VulnerabilityAssessmentPolicyBaselineName + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: SqlPoolVulnerabilityAssessmentRuleBaseline or + ClientRawResponse if raw=true + :rtype: + ~azure.mgmt.synapse.models.SqlPoolVulnerabilityAssessmentRuleBaseline + or ~msrest.pipeline.ClientRawResponse + :raises: :class:`CloudError` + """ + # Construct URL + url = self.get.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), + 'sqlPoolName': self._serialize.url("sql_pool_name", sql_pool_name, 'str'), + 'vulnerabilityAssessmentName': self._serialize.url("self.vulnerability_assessment_name", self.vulnerability_assessment_name, 'str'), + 'ruleId': self._serialize.url("rule_id", rule_id, 'str'), + 'baselineName': self._serialize.url("baseline_name", baseline_name, 'VulnerabilityAssessmentPolicyBaselineName') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = 
CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('SqlPoolVulnerabilityAssessmentRuleBaseline', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Synapse/workspaces/{workspaceName}/sqlPools/{sqlPoolName}/vulnerabilityAssessments/{vulnerabilityAssessmentName}/rules/{ruleId}/baselines/{baselineName}'} diff --git a/sdk/synapse/azure-mgmt-synapse/azure/mgmt/synapse/operations/_sql_pool_vulnerability_assessment_scans_operations.py b/sdk/synapse/azure-mgmt-synapse/azure/mgmt/synapse/operations/_sql_pool_vulnerability_assessment_scans_operations.py index 127f1b45b025..60a6e649d562 100644 --- a/sdk/synapse/azure-mgmt-synapse/azure/mgmt/synapse/operations/_sql_pool_vulnerability_assessment_scans_operations.py +++ b/sdk/synapse/azure-mgmt-synapse/azure/mgmt/synapse/operations/_sql_pool_vulnerability_assessment_scans_operations.py @@ -287,3 +287,74 @@ def export( return deserialized export.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Synapse/workspaces/{workspaceName}/sqlPools/{sqlPoolName}/vulnerabilityAssessments/{vulnerabilityAssessmentName}/scans/{scanId}/export'} + + def get( + self, resource_group_name, workspace_name, sql_pool_name, scan_id, custom_headers=None, raw=False, **operation_config): + """Gets a vulnerability assessment scan record of a Sql pool. + + :param resource_group_name: The name of the resource group. The name + is case insensitive. 
+ :type resource_group_name: str + :param workspace_name: The name of the workspace + :type workspace_name: str + :param sql_pool_name: SQL pool name + :type sql_pool_name: str + :param scan_id: The vulnerability assessment scan Id of the scan to + retrieve. + :type scan_id: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: VulnerabilityAssessmentScanRecord or ClientRawResponse if + raw=true + :rtype: ~azure.mgmt.synapse.models.VulnerabilityAssessmentScanRecord + or ~msrest.pipeline.ClientRawResponse + :raises: :class:`CloudError` + """ + # Construct URL + url = self.get.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), + 'sqlPoolName': self._serialize.url("sql_pool_name", sql_pool_name, 'str'), + 'vulnerabilityAssessmentName': self._serialize.url("self.vulnerability_assessment_name", self.vulnerability_assessment_name, 'str'), + 'scanId': self._serialize.url("scan_id", scan_id, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + 
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('VulnerabilityAssessmentScanRecord', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Synapse/workspaces/{workspaceName}/sqlPools/{sqlPoolName}/vulnerabilityAssessments/{vulnerabilityAssessmentName}/scans/{scanId}'}