diff --git a/azurerm/internal/services/datashare/client/client.go b/azurerm/internal/services/datashare/client/client.go index 09bbc336a08c..d255b829b68a 100644 --- a/azurerm/internal/services/datashare/client/client.go +++ b/azurerm/internal/services/datashare/client/client.go @@ -7,6 +7,7 @@ import ( type Client struct { AccountClient *datashare.AccountsClient + DataSetClient *datashare.DataSetsClient SharesClient *datashare.SharesClient SynchronizationClient *datashare.SynchronizationSettingsClient } @@ -15,6 +16,9 @@ func NewClient(o *common.ClientOptions) *Client { accountClient := datashare.NewAccountsClientWithBaseURI(o.ResourceManagerEndpoint, o.SubscriptionId) o.ConfigureClient(&accountClient.Client, o.ResourceManagerAuthorizer) + dataSetClient := datashare.NewDataSetsClientWithBaseURI(o.ResourceManagerEndpoint, o.SubscriptionId) + o.ConfigureClient(&dataSetClient.Client, o.ResourceManagerAuthorizer) + sharesClient := datashare.NewSharesClientWithBaseURI(o.ResourceManagerEndpoint, o.SubscriptionId) o.ConfigureClient(&sharesClient.Client, o.ResourceManagerAuthorizer) @@ -23,6 +27,7 @@ func NewClient(o *common.ClientOptions) *Client { return &Client{ AccountClient: &accountClient, + DataSetClient: &dataSetClient, SharesClient: &sharesClient, SynchronizationClient: &synchronizationSettingsClient, } diff --git a/azurerm/internal/services/datashare/data_source_data_share_dataset_blob_storage.go b/azurerm/internal/services/datashare/data_source_data_share_dataset_blob_storage.go new file mode 100644 index 000000000000..f375ea94ab24 --- /dev/null +++ b/azurerm/internal/services/datashare/data_source_data_share_dataset_blob_storage.go @@ -0,0 +1,136 @@ +package datashare + +import ( + "fmt" + "time" + + "github.com/Azure/azure-sdk-for-go/services/datashare/mgmt/2019-11-01/datashare" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/helper" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/validate" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" +) + +func dataSourceDataShareDatasetBlobStorage() *schema.Resource { + return &schema.Resource{ + Read: dataSourceArmDataShareDatasetBlobStorageRead, + + Timeouts: &schema.ResourceTimeout{ + Read: schema.DefaultTimeout(5 * time.Minute), + }, + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validate.DatashareDataSetName(), + }, + + "share_id": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validate.DataShareID, + }, + + "container_name": { + Type: schema.TypeString, + Computed: true, + }, + + "storage_account_name": { + Type: schema.TypeString, + Computed: true, + }, + + "storage_account_resource_group_name": { + Type: schema.TypeString, + Computed: true, + }, + + "storage_account_subscription_id": { + Type: schema.TypeString, + Computed: true, + }, + + "file_path": { + Type: schema.TypeString, + Computed: true, + }, + + "folder_path": { + Type: schema.TypeString, + Computed: true, + }, + + "display_name": { + Type: schema.TypeString, + Computed: true, + }, + }, + } +} + +func dataSourceArmDataShareDatasetBlobStorageRead(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).DataShare.DataSetClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + name := d.Get("name").(string) + shareID := d.Get("share_id").(string) + shareId, err := parse.DataShareID(shareID) + if err != nil { + return err + } + + respModel, err := client.Get(ctx, shareId.ResourceGroup, shareId.AccountName, shareId.Name, name) + if err != 
nil { + return fmt.Errorf("retrieving DataShare Blob Storage DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name, err) + } + + respId := helper.GetAzurermDataShareDataSetId(respModel.Value) + if respId == nil || *respId == "" { + return fmt.Errorf("empty or nil ID returned for reading DataShare Blob Storage DataSet %q (Resource Group %q / accountName %q / shareName %q)", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name) + } + + d.SetId(*respId) + d.Set("name", name) + d.Set("share_id", shareID) + + switch resp := respModel.Value.(type) { + case datashare.BlobDataSet: + if props := resp.BlobProperties; props != nil { + d.Set("container_name", props.ContainerName) + d.Set("storage_account_name", props.StorageAccountName) + d.Set("storage_account_resource_group_name", props.ResourceGroup) + d.Set("storage_account_subscription_id", props.SubscriptionID) + d.Set("file_path", props.FilePath) + d.Set("display_name", props.DataSetID) + } + + case datashare.BlobFolderDataSet: + if props := resp.BlobFolderProperties; props != nil { + d.Set("container_name", props.ContainerName) + d.Set("storage_account_name", props.StorageAccountName) + d.Set("storage_account_resource_group_name", props.ResourceGroup) + d.Set("storage_account_subscription_id", props.SubscriptionID) + d.Set("folder_path", props.Prefix) + d.Set("display_name", props.DataSetID) + } + + case datashare.BlobContainerDataSet: + if props := resp.BlobContainerProperties; props != nil { + d.Set("container_name", props.ContainerName) + d.Set("storage_account_name", props.StorageAccountName) + d.Set("storage_account_resource_group_name", props.ResourceGroup) + d.Set("storage_account_subscription_id", props.SubscriptionID) + d.Set("display_name", props.DataSetID) + } + + default: + return fmt.Errorf("data share dataset %q (Resource Group %q / accountName %q / shareName %q) is not a blob storage dataset", name, 
shareId.ResourceGroup, shareId.AccountName, shareId.Name) + } + + return nil +} diff --git a/azurerm/internal/services/datashare/data_source_data_share_dataset_data_lake_gen1.go b/azurerm/internal/services/datashare/data_source_data_share_dataset_data_lake_gen1.go new file mode 100644 index 000000000000..e9bff91ac803 --- /dev/null +++ b/azurerm/internal/services/datashare/data_source_data_share_dataset_data_lake_gen1.go @@ -0,0 +1,121 @@ +package datashare + +import ( + "fmt" + "time" + + "github.com/Azure/azure-sdk-for-go/services/datashare/mgmt/2019-11-01/datashare" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/helper" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/validate" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" +) + +func dataSourceDataShareDatasetDataLakeGen1() *schema.Resource { + return &schema.Resource{ + Read: dataSourceArmDataShareDatasetDataLakeGen1Read, + + Timeouts: &schema.ResourceTimeout{ + Read: schema.DefaultTimeout(5 * time.Minute), + }, + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validate.DatashareDataSetName(), + }, + + "share_id": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validate.DataShareID, + }, + + "data_lake_store_name": { + Type: schema.TypeString, + Computed: true, + }, + + "data_lake_store_resource_group_name": { + Type: schema.TypeString, + Computed: true, + }, + + "data_lake_store_subscription_id": { + Type: schema.TypeString, + Computed: true, + }, + + "folder_path": { + Type: schema.TypeString, + Computed: true, + }, + + 
"file_name": { + Type: schema.TypeString, + Computed: true, + }, + + "display_name": { + Type: schema.TypeString, + Computed: true, + }, + }, + } +} + +func dataSourceArmDataShareDatasetDataLakeGen1Read(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).DataShare.DataSetClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + name := d.Get("name").(string) + shareID := d.Get("share_id").(string) + shareId, err := parse.DataShareID(shareID) + if err != nil { + return err + } + + respModel, err := client.Get(ctx, shareId.ResourceGroup, shareId.AccountName, shareId.Name, name) + if err != nil { + return fmt.Errorf("retrieving DataShare Data Lake Gen1 DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name, err) + } + + respId := helper.GetAzurermDataShareDataSetId(respModel.Value) + if respId == nil || *respId == "" { + return fmt.Errorf("empty or nil ID returned for DataShare Data Lake Gen1 DataSet %q (Resource Group %q / accountName %q / shareName %q)", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name) + } + + d.SetId(*respId) + d.Set("name", name) + d.Set("share_id", shareID) + + switch resp := respModel.Value.(type) { + case datashare.ADLSGen1FileDataSet: + if props := resp.ADLSGen1FileProperties; props != nil { + d.Set("data_lake_store_name", props.AccountName) + d.Set("data_lake_store_resource_group_name", props.ResourceGroup) + d.Set("data_lake_store_subscription_id", props.SubscriptionID) + d.Set("folder_path", props.FolderPath) + d.Set("file_name", props.FileName) + d.Set("display_name", props.DataSetID) + } + + case datashare.ADLSGen1FolderDataSet: + if props := resp.ADLSGen1FolderProperties; props != nil { + d.Set("data_lake_store_name", props.AccountName) + d.Set("data_lake_store_resource_group_name", props.ResourceGroup) + d.Set("data_lake_store_subscription_id", props.SubscriptionID) + 
d.Set("folder_path", props.FolderPath) + d.Set("display_name", props.DataSetID) + } + + default: + return fmt.Errorf("data share dataset %q (Resource Group %q / accountName %q / shareName %q) is not a datalake store gen1 dataset", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name) + } + + return nil +} diff --git a/azurerm/internal/services/datashare/data_source_data_share_dataset_data_lake_gen2.go b/azurerm/internal/services/datashare/data_source_data_share_dataset_data_lake_gen2.go new file mode 100644 index 000000000000..1f521e5a83dc --- /dev/null +++ b/azurerm/internal/services/datashare/data_source_data_share_dataset_data_lake_gen2.go @@ -0,0 +1,136 @@ +package datashare + +import ( + "fmt" + "time" + + "github.com/Azure/azure-sdk-for-go/services/datashare/mgmt/2019-11-01/datashare" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/helper" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/validate" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" +) + +func dataSourceDataShareDatasetDataLakeGen2() *schema.Resource { + return &schema.Resource{ + Read: dataSourceArmDataShareDatasetDataLakeGen2Read, + + Timeouts: &schema.ResourceTimeout{ + Read: schema.DefaultTimeout(5 * time.Minute), + }, + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validate.DatashareDataSetName(), + }, + + "share_id": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validate.DataShareID, + }, + + "storage_account_name": { + Type: schema.TypeString, + Computed: true, + }, + + 
"storage_account_resource_group_name": { + Type: schema.TypeString, + Computed: true, + }, + + "storage_account_subscription_id": { + Type: schema.TypeString, + Computed: true, + }, + + "file_system_name": { + Type: schema.TypeString, + Computed: true, + }, + + "file_path": { + Type: schema.TypeString, + Computed: true, + }, + + "folder_path": { + Type: schema.TypeString, + Computed: true, + }, + + "display_name": { + Type: schema.TypeString, + Computed: true, + }, + }, + } +} + +func dataSourceArmDataShareDatasetDataLakeGen2Read(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).DataShare.DataSetClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + name := d.Get("name").(string) + shareID := d.Get("share_id").(string) + shareId, err := parse.DataShareID(shareID) + if err != nil { + return err + } + + respModel, err := client.Get(ctx, shareId.ResourceGroup, shareId.AccountName, shareId.Name, name) + if err != nil { + return fmt.Errorf("retrieving DataShare Data Lake Gen2 DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name, err) + } + + respId := helper.GetAzurermDataShareDataSetId(respModel.Value) + if respId == nil || *respId == "" { + return fmt.Errorf("empty or nil ID returned for DataShare Data Lake Gen2 DataSet %q (Resource Group %q / accountName %q / shareName %q)", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name) + } + + d.SetId(*respId) + d.Set("name", name) + d.Set("share_id", shareID) + + switch resp := respModel.Value.(type) { + case datashare.ADLSGen2FileDataSet: + if props := resp.ADLSGen2FileProperties; props != nil { + d.Set("storage_account_name", props.StorageAccountName) + d.Set("storage_account_resource_group_name", props.ResourceGroup) + d.Set("storage_account_subscription_id", props.SubscriptionID) + d.Set("file_system_name", props.FileSystem) + d.Set("file_path", 
props.FilePath) + d.Set("display_name", props.DataSetID) + } + + case datashare.ADLSGen2FolderDataSet: + if props := resp.ADLSGen2FolderProperties; props != nil { + d.Set("storage_account_name", props.StorageAccountName) + d.Set("storage_account_resource_group_name", props.ResourceGroup) + d.Set("storage_account_subscription_id", props.SubscriptionID) + d.Set("file_system_name", props.FileSystem) + d.Set("folder_path", props.FolderPath) + d.Set("display_name", props.DataSetID) + } + + case datashare.ADLSGen2FileSystemDataSet: + if props := resp.ADLSGen2FileSystemProperties; props != nil { + d.Set("storage_account_name", props.StorageAccountName) + d.Set("storage_account_resource_group_name", props.ResourceGroup) + d.Set("storage_account_subscription_id", props.SubscriptionID) + d.Set("file_system_name", props.FileSystem) + d.Set("display_name", props.DataSetID) + } + + default: + return fmt.Errorf("data share dataset %q (Resource Group %q / accountName %q / shareName %q) is not a datalake store gen2 dataset", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name) + } + + return nil +} diff --git a/azurerm/internal/services/datashare/data_source_data_share_dataset_kusto_cluster.go b/azurerm/internal/services/datashare/data_source_data_share_dataset_kusto_cluster.go new file mode 100644 index 000000000000..90e9099bb66b --- /dev/null +++ b/azurerm/internal/services/datashare/data_source_data_share_dataset_kusto_cluster.go @@ -0,0 +1,91 @@ +package datashare + +import ( + "fmt" + "time" + + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/helper" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/validate" + 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" +) + +func dataSourceDataShareDatasetKustoCluster() *schema.Resource { + return &schema.Resource{ + Read: dataSourceArmDataShareDatasetKustoClusterRead, + + Timeouts: &schema.ResourceTimeout{ + Read: schema.DefaultTimeout(5 * time.Minute), + }, + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validate.DatashareDataSetName(), + }, + + "share_id": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validate.DataShareID, + }, + + "kusto_cluster_id": { + Type: schema.TypeString, + Computed: true, + }, + + "display_name": { + Type: schema.TypeString, + Computed: true, + }, + + "kusto_cluster_location": { + Type: schema.TypeString, + Computed: true, + }, + }, + } +} + +func dataSourceArmDataShareDatasetKustoClusterRead(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).DataShare.DataSetClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + name := d.Get("name").(string) + shareID := d.Get("share_id").(string) + shareId, err := parse.DataShareID(shareID) + if err != nil { + return err + } + + respModel, err := client.Get(ctx, shareId.ResourceGroup, shareId.AccountName, shareId.Name, name) + if err != nil { + return fmt.Errorf("retrieving DataShare Kusto Cluster DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name, err) + } + + respId := helper.GetAzurermDataShareDataSetId(respModel.Value) + if respId == nil || *respId == "" { + return fmt.Errorf("empty or nil ID returned for DataShare Kusto Cluster DataSet %q (Resource Group %q / accountName %q / shareName %q)", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name) + } + + d.SetId(*respId) + d.Set("name", name) + d.Set("share_id", shareID) + + resp, ok := respModel.Value.AsKustoClusterDataSet() 
+ if !ok { + return fmt.Errorf("dataShare %q (Resource Group %q / accountName %q / shareName %q) is not kusto cluster dataset", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name) + } + if props := resp.KustoClusterDataSetProperties; props != nil { + d.Set("kusto_cluster_id", props.KustoClusterResourceID) + d.Set("display_name", props.DataSetID) + d.Set("kusto_cluster_location", props.Location) + } + + return nil +} diff --git a/azurerm/internal/services/datashare/data_source_data_share_dataset_kusto_database.go b/azurerm/internal/services/datashare/data_source_data_share_dataset_kusto_database.go new file mode 100644 index 000000000000..12809e4e2a8a --- /dev/null +++ b/azurerm/internal/services/datashare/data_source_data_share_dataset_kusto_database.go @@ -0,0 +1,91 @@ +package datashare + +import ( + "fmt" + "time" + + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/helper" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/validate" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" +) + +func dataSourceDataShareDatasetKustoDatabase() *schema.Resource { + return &schema.Resource{ + Read: dataSourceArmDataShareDatasetKustoDatabaseRead, + + Timeouts: &schema.ResourceTimeout{ + Read: schema.DefaultTimeout(5 * time.Minute), + }, + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validate.DatashareDataSetName(), + }, + + "share_id": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validate.DataShareID, + }, + + "kusto_database_id": { + Type: schema.TypeString, + Computed: true, + }, + + 
"display_name": { + Type: schema.TypeString, + Computed: true, + }, + + "kusto_cluster_location": { + Type: schema.TypeString, + Computed: true, + }, + }, + } +} + +func dataSourceArmDataShareDatasetKustoDatabaseRead(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).DataShare.DataSetClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + name := d.Get("name").(string) + shareID := d.Get("share_id").(string) + shareId, err := parse.DataShareID(shareID) + if err != nil { + return err + } + + respModel, err := client.Get(ctx, shareId.ResourceGroup, shareId.AccountName, shareId.Name, name) + if err != nil { + return fmt.Errorf("retrieving DataShare Kusto Database DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name, err) + } + + respId := helper.GetAzurermDataShareDataSetId(respModel.Value) + if respId == nil || *respId == "" { + return fmt.Errorf("empty or nil ID returned for DataShare Kusto Database DataSet %q (Resource Group %q / accountName %q / shareName %q)", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name) + } + + d.SetId(*respId) + d.Set("name", name) + d.Set("share_id", shareID) + + resp, ok := respModel.Value.AsKustoDatabaseDataSet() + if !ok { + return fmt.Errorf("dataShare %q (Resource Group %q / accountName %q / shareName %q) is not kusto database dataset", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name) + } + if props := resp.KustoDatabaseDataSetProperties; props != nil { + d.Set("kusto_database_id", props.KustoDatabaseResourceID) + d.Set("display_name", props.DataSetID) + d.Set("kusto_cluster_location", props.Location) + } + + return nil +} diff --git a/azurerm/internal/services/datashare/helper/data_share.go b/azurerm/internal/services/datashare/helper/data_share.go new file mode 100644 index 000000000000..6d56a0748a8f --- /dev/null +++ 
b/azurerm/internal/services/datashare/helper/data_share.go @@ -0,0 +1,39 @@ +package helper + +import ( + "github.com/Azure/azure-sdk-for-go/services/datashare/mgmt/2019-11-01/datashare" +) + +func GetAzurermDataShareDataSetId(dataset datashare.BasicDataSet) *string { + if dataset == nil { + return nil + } + switch t := dataset.(type) { + case datashare.BlobDataSet: + return t.ID + case datashare.BlobFolderDataSet: + return t.ID + case datashare.BlobContainerDataSet: + return t.ID + case datashare.ADLSGen2FileDataSet: + return t.ID + case datashare.ADLSGen2FolderDataSet: + return t.ID + case datashare.ADLSGen2FileSystemDataSet: + return t.ID + case datashare.ADLSGen1FolderDataSet: + return t.ID + case datashare.ADLSGen1FileDataSet: + return t.ID + case datashare.KustoClusterDataSet: + return t.ID + case datashare.KustoDatabaseDataSet: + return t.ID + case datashare.SQLDWTableDataSet: + return t.ID + case datashare.SQLDBTableDataSet: + return t.ID + default: + return nil + } +} diff --git a/azurerm/internal/services/datashare/parse/data_share.go b/azurerm/internal/services/datashare/parse/data_share.go index a9247a7b0da5..88331cbd53dd 100644 --- a/azurerm/internal/services/datashare/parse/data_share.go +++ b/azurerm/internal/services/datashare/parse/data_share.go @@ -17,6 +17,13 @@ type DataShareId struct { Name string } +type DataShareDataSetId struct { + ResourceGroup string + AccountName string + ShareName string + Name string +} + func DataShareAccountID(input string) (*DataShareAccountId, error) { id, err := azure.ParseAzureResourceID(input) if err != nil { @@ -57,3 +64,28 @@ func DataShareID(input string) (*DataShareId, error) { return &DataShare, nil } + +func DataShareDataSetID(input string) (*DataShareDataSetId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, fmt.Errorf("[ERROR] Unable to parse DataShareDataSet ID %q: %+v", input, err) + } + + dataShareDataSet := DataShareDataSetId{ + ResourceGroup: 
id.ResourceGroup, + } + if dataShareDataSet.AccountName, err = id.PopSegment("accounts"); err != nil { + return nil, err + } + if dataShareDataSet.ShareName, err = id.PopSegment("shares"); err != nil { + return nil, err + } + if dataShareDataSet.Name, err = id.PopSegment("dataSets"); err != nil { + return nil, err + } + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &dataShareDataSet, nil +} diff --git a/azurerm/internal/services/datashare/parse/data_share_test.go b/azurerm/internal/services/datashare/parse/data_share_test.go index fe1d4f3deca5..6c8c6e96d3ee 100644 --- a/azurerm/internal/services/datashare/parse/data_share_test.go +++ b/azurerm/internal/services/datashare/parse/data_share_test.go @@ -152,3 +152,100 @@ func TestDataShareID(t *testing.T) { } } } + +func TestDataShareDataSetID(t *testing.T) { + testData := []struct { + Name string + Input string + Expected *DataShareDataSetId + }{ + { + Name: "Empty", + Input: "", + Expected: nil, + }, + { + Name: "No Resource Groups Segment", + Input: "/subscriptions/00000000-0000-0000-0000-000000000000", + Expected: nil, + }, + { + Name: "No Resource Groups Value", + Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", + Expected: nil, + }, + { + Name: "Resource Group ID", + Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", + Expected: nil, + }, + { + Name: "Missing Account Value", + Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.DataShare/accounts/", + Expected: nil, + }, + { + Name: "Missing Share", + Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.DataShare/accounts/account1/", + Expected: nil, + }, + { + Name: "Missing Share Value", + Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.DataShare/accounts/account1/shares/", + Expected: nil, + }, + { 
+ Name: "Missing DataSet", + Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.DataShare/accounts/account1/shares/share1", + Expected: nil, + }, + { + Name: "Missing DataSet Value", + Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.DataShare/accounts/account1/shares/share1/dataSets", + Expected: nil, + }, + { + Name: "DataSet ID", + Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.DataShare/accounts/account1/shares/share1/dataSets/dataSet1", + Expected: &DataShareDataSetId{ + Name: "dataSet1", + AccountName: "account1", + ResourceGroup: "resGroup1", + ShareName: "share1", + }, + }, + { + Name: "Wrong Casing", + Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.DataShare/accounts/account1/shares/share1/DataSets/dataSet1", + Expected: nil, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q..", v.Name) + + actual, err := DataShareDataSetID(v.Input) + if err != nil { + if v.Expected == nil { + continue + } + t.Fatalf("Expected a value but got an error: %s", err) + } + + if actual.ShareName != v.Expected.ShareName { + t.Fatalf("Expected %q but got %q for account name", v.Expected.ShareName, actual.ShareName) + } + + if actual.AccountName != v.Expected.AccountName { + t.Fatalf("Expected %q but got %q for account name", v.Expected.AccountName, actual.AccountName) + } + + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/datashare/registration.go b/azurerm/internal/services/datashare/registration.go index 87cff3e1cbaa..173a486d8c17 100644 --- 
a/azurerm/internal/services/datashare/registration.go +++ b/azurerm/internal/services/datashare/registration.go @@ -19,15 +19,25 @@ func (r Registration) WebsiteCategories() []string { // SupportedDataSources returns the supported Data Sources supported by this Service func (r Registration) SupportedDataSources() map[string]*schema.Resource { return map[string]*schema.Resource{ - "azurerm_data_share_account": dataSourceDataShareAccount(), - "azurerm_data_share": dataSourceDataShare(), + "azurerm_data_share_account": dataSourceDataShareAccount(), + "azurerm_data_share": dataSourceDataShare(), + "azurerm_data_share_dataset_blob_storage": dataSourceDataShareDatasetBlobStorage(), + "azurerm_data_share_dataset_data_lake_gen1": dataSourceDataShareDatasetDataLakeGen1(), + "azurerm_data_share_dataset_data_lake_gen2": dataSourceDataShareDatasetDataLakeGen2(), + "azurerm_data_share_dataset_kusto_cluster": dataSourceDataShareDatasetKustoCluster(), + "azurerm_data_share_dataset_kusto_database": dataSourceDataShareDatasetKustoDatabase(), } } // SupportedResources returns the supported Resources supported by this Service func (r Registration) SupportedResources() map[string]*schema.Resource { return map[string]*schema.Resource{ - "azurerm_data_share_account": resourceArmDataShareAccount(), - "azurerm_data_share": resourceArmDataShare(), + "azurerm_data_share_account": resourceArmDataShareAccount(), + "azurerm_data_share": resourceArmDataShare(), + "azurerm_data_share_dataset_blob_storage": resourceArmDataShareDataSetBlobStorage(), + "azurerm_data_share_dataset_data_lake_gen1": resourceArmDataShareDataSetDataLakeGen1(), + "azurerm_data_share_dataset_data_lake_gen2": resourceArmDataShareDataSetDataLakeGen2(), + "azurerm_data_share_dataset_kusto_cluster": resourceArmDataShareDataSetKustoCluster(), + "azurerm_data_share_dataset_kusto_database": resourceArmDataShareDataSetKustoDatabase(), } } diff --git 
a/azurerm/internal/services/datashare/resource_arm_data_share_dataset_blob_storage.go b/azurerm/internal/services/datashare/resource_arm_data_share_dataset_blob_storage.go new file mode 100644 index 000000000000..9a8bd1dde2d4 --- /dev/null +++ b/azurerm/internal/services/datashare/resource_arm_data_share_dataset_blob_storage.go @@ -0,0 +1,260 @@ +package datashare + +import ( + "fmt" + "log" + "time" + + "github.com/Azure/azure-sdk-for-go/services/datashare/mgmt/2019-11-01/datashare" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + azValidate "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/validate" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/helper" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/validate" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/storage" + azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func resourceArmDataShareDataSetBlobStorage() *schema.Resource { + return &schema.Resource{ + Create: resourceArmDataShareDataSetBlobStorageCreate, + Read: resourceArmDataShareDataSetBlobStorageRead, + Delete: resourceArmDataShareDataSetBlobStorageDelete, + + Timeouts: &schema.ResourceTimeout{ + Create: 
schema.DefaultTimeout(30 * time.Minute), + Read: schema.DefaultTimeout(5 * time.Minute), + Delete: schema.DefaultTimeout(30 * time.Minute), + }, + + Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { + _, err := parse.DataShareDataSetID(id) + return err + }), + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validate.DatashareDataSetName(), + }, + + "share_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validate.DataShareID, + }, + + "container_name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: azValidate.StorageContainerName, + }, + + "storage_account_name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: storage.ValidateArmStorageAccountName, + }, + + "storage_account_resource_group_name": azure.SchemaResourceGroupName(), + + "storage_account_subscription_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.IsUUID, + }, + + "file_path": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + ValidateFunc: validation.StringIsNotEmpty, + ConflictsWith: []string{"folder_path"}, + }, + + "folder_path": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + ValidateFunc: validation.StringIsNotEmpty, + ConflictsWith: []string{"file_path"}, + }, + + "display_name": { + Type: schema.TypeString, + Computed: true, + }, + }, + } +} +func resourceArmDataShareDataSetBlobStorageCreate(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).DataShare.DataSetClient + ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) + defer cancel() + + name := d.Get("name").(string) + shareId, err := parse.DataShareID(d.Get("share_id").(string)) + if err != nil { + return err + } + + existing, err := client.Get(ctx, shareId.ResourceGroup, shareId.AccountName, 
shareId.Name, name) + if err != nil { + if !utils.ResponseWasNotFound(existing.Response) { + return fmt.Errorf("checking for present of existing DataShare Blob Storage DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name, err) + } + } + existingId := helper.GetAzurermDataShareDataSetId(existing.Value) + if existingId != nil && *existingId != "" { + return tf.ImportAsExistsError("azurerm_data_share_dataset_blob_storage", *existingId) + } + + var dataSet datashare.BasicDataSet + if filePath, ok := d.GetOk("file_path"); ok { + dataSet = datashare.BlobDataSet{ + Kind: datashare.KindBlob, + BlobProperties: &datashare.BlobProperties{ + ContainerName: utils.String(d.Get("container_name").(string)), + StorageAccountName: utils.String(d.Get("storage_account_name").(string)), + ResourceGroup: utils.String(d.Get("storage_account_resource_group_name").(string)), + SubscriptionID: utils.String(d.Get("storage_account_subscription_id").(string)), + FilePath: utils.String(filePath.(string)), + }, + } + } else if folderPath, ok := d.GetOk("folder_path"); ok { + dataSet = datashare.BlobFolderDataSet{ + Kind: datashare.KindBlobFolder, + BlobFolderProperties: &datashare.BlobFolderProperties{ + ContainerName: utils.String(d.Get("container_name").(string)), + StorageAccountName: utils.String(d.Get("storage_account_name").(string)), + ResourceGroup: utils.String(d.Get("storage_account_resource_group_name").(string)), + SubscriptionID: utils.String(d.Get("storage_account_subscription_id").(string)), + Prefix: utils.String(folderPath.(string)), + }, + } + } else { + dataSet = datashare.BlobContainerDataSet{ + Kind: datashare.KindContainer, + BlobContainerProperties: &datashare.BlobContainerProperties{ + ContainerName: utils.String(d.Get("container_name").(string)), + StorageAccountName: utils.String(d.Get("storage_account_name").(string)), + ResourceGroup: 
utils.String(d.Get("storage_account_resource_group_name").(string)), + SubscriptionID: utils.String(d.Get("storage_account_subscription_id").(string)), + }, + } + } + + if _, err := client.Create(ctx, shareId.ResourceGroup, shareId.AccountName, shareId.Name, name, dataSet); err != nil { + return fmt.Errorf("creating DataShare Blob Storage DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name, err) + } + + resp, err := client.Get(ctx, shareId.ResourceGroup, shareId.AccountName, shareId.Name, name) + if err != nil { + return fmt.Errorf("retrieving DataShare Blob Storage DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name, err) + } + + respId := helper.GetAzurermDataShareDataSetId(resp.Value) + if respId == nil || *respId == "" { + return fmt.Errorf("empty or nil ID returned for DataShare Blob Storage DataSet %q (Resource Group %q / accountName %q / shareName %q)", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name) + } + + d.SetId(*respId) + return resourceArmDataShareDataSetBlobStorageRead(d, meta) +} + +func resourceArmDataShareDataSetBlobStorageRead(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).DataShare.DataSetClient + shareClient := meta.(*clients.Client).DataShare.SharesClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.DataShareDataSetID(d.Id()) + if err != nil { + return err + } + + resp, err := client.Get(ctx, id.ResourceGroup, id.AccountName, id.ShareName, id.Name) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + log.Printf("[INFO] DataShare %q does not exist - removing from state", d.Id()) + d.SetId("") + return nil + } + return fmt.Errorf("retrieving DataShare Blob Storage DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", id.Name, 
id.ResourceGroup, id.AccountName, id.ShareName, err) + } + + d.Set("name", id.Name) + shareResp, err := shareClient.Get(ctx, id.ResourceGroup, id.AccountName, id.ShareName) + if err != nil { + return fmt.Errorf("retrieving DataShare %q (Resource Group %q / accountName %q): %+v", id.ShareName, id.ResourceGroup, id.AccountName, err) + } + if shareResp.ID == nil || *shareResp.ID == "" { + return fmt.Errorf("empty or nil ID returned for DataShare %q (Resource Group %q / accountName %q)", id.ShareName, id.ResourceGroup, id.AccountName) + } + + d.Set("share_id", shareResp.ID) + + switch resp := resp.Value.(type) { + case datashare.BlobDataSet: + if props := resp.BlobProperties; props != nil { + d.Set("container_name", props.ContainerName) + d.Set("storage_account_name", props.StorageAccountName) + d.Set("storage_account_resource_group_name", props.ResourceGroup) + d.Set("storage_account_subscription_id", props.SubscriptionID) + d.Set("file_path", props.FilePath) + d.Set("display_name", props.DataSetID) + } + + case datashare.BlobFolderDataSet: + if props := resp.BlobFolderProperties; props != nil { + d.Set("container_name", props.ContainerName) + d.Set("storage_account_name", props.StorageAccountName) + d.Set("storage_account_resource_group_name", props.ResourceGroup) + d.Set("storage_account_subscription_id", props.SubscriptionID) + d.Set("folder_path", props.Prefix) + d.Set("display_name", props.DataSetID) + } + + case datashare.BlobContainerDataSet: + if props := resp.BlobContainerProperties; props != nil { + d.Set("container_name", props.ContainerName) + d.Set("storage_account_name", props.StorageAccountName) + d.Set("storage_account_resource_group_name", props.ResourceGroup) + d.Set("storage_account_subscription_id", props.SubscriptionID) + d.Set("display_name", props.DataSetID) + } + + default: + return fmt.Errorf("data share dataset %q (Resource Group %q / accountName %q / shareName %q) is not a blob storage dataset", id.Name, id.ResourceGroup, id.AccountName, 
id.ShareName) + } + + return nil +} + +func resourceArmDataShareDataSetBlobStorageDelete(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).DataShare.DataSetClient + ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.DataShareDataSetID(d.Id()) + if err != nil { + return err + } + + if _, err := client.Delete(ctx, id.ResourceGroup, id.AccountName, id.ShareName, id.Name); err != nil { + return fmt.Errorf("deleting DataShare Blob Storage DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", id.Name, id.ResourceGroup, id.AccountName, id.ShareName, err) + } + return nil +} diff --git a/azurerm/internal/services/datashare/resource_arm_data_share_dataset_data_lake_gen1.go b/azurerm/internal/services/datashare/resource_arm_data_share_dataset_data_lake_gen1.go new file mode 100644 index 000000000000..d20a9abcf772 --- /dev/null +++ b/azurerm/internal/services/datashare/resource_arm_data_share_dataset_data_lake_gen1.go @@ -0,0 +1,227 @@ +package datashare + +import ( + "fmt" + "log" + "time" + + "github.com/Azure/azure-sdk-for-go/services/datashare/mgmt/2019-11-01/datashare" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/helper" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/validate" + azSchema 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func resourceArmDataShareDataSetDataLakeGen1() *schema.Resource { + return &schema.Resource{ + Create: resourceArmDataShareDataSetDataLakeGen1Create, + Read: resourceArmDataShareDataSetDataLakeGen1Read, + Delete: resourceArmDataShareDataSetDataLakeGen1Delete, + + Timeouts: &schema.ResourceTimeout{ + Create: schema.DefaultTimeout(30 * time.Minute), + Read: schema.DefaultTimeout(5 * time.Minute), + Delete: schema.DefaultTimeout(30 * time.Minute), + }, + + Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { + _, err := parse.DataShareDataSetID(id) + return err + }), + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validate.DatashareDataSetName(), + }, + + "share_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validate.DataShareID, + }, + + "data_lake_store_name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: azure.ValidateDataLakeAccountName(), + }, + + "data_lake_store_resource_group_name": azure.SchemaResourceGroupName(), + + "data_lake_store_subscription_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.IsUUID, + }, + + "folder_path": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + + "file_name": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + + "display_name": { + Type: schema.TypeString, + Computed: true, + }, + }, + } +} +func resourceArmDataShareDataSetDataLakeGen1Create(d *schema.ResourceData, meta interface{}) error { + client := 
meta.(*clients.Client).DataShare.DataSetClient + ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) + defer cancel() + + name := d.Get("name").(string) + shareId, err := parse.DataShareID(d.Get("share_id").(string)) + if err != nil { + return err + } + + existing, err := client.Get(ctx, shareId.ResourceGroup, shareId.AccountName, shareId.Name, name) + if err != nil { + if !utils.ResponseWasNotFound(existing.Response) { + return fmt.Errorf("checking for present of existing DataShare DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name, err) + } + } + existingId := helper.GetAzurermDataShareDataSetId(existing.Value) + if existingId != nil && *existingId != "" { + return tf.ImportAsExistsError("azurerm_data_share_dataset_data_lake_gen1", *existingId) + } + + var dataSet datashare.BasicDataSet + + if fileName, ok := d.GetOk("file_name"); ok { + dataSet = datashare.ADLSGen1FileDataSet{ + Kind: datashare.KindAdlsGen1File, + ADLSGen1FileProperties: &datashare.ADLSGen1FileProperties{ + AccountName: utils.String(d.Get("data_lake_store_name").(string)), + ResourceGroup: utils.String(d.Get("data_lake_store_resource_group_name").(string)), + SubscriptionID: utils.String(d.Get("data_lake_store_subscription_id").(string)), + FolderPath: utils.String(d.Get("folder_path").(string)), + FileName: utils.String(fileName.(string)), + }, + } + } else { + dataSet = datashare.ADLSGen1FolderDataSet{ + Kind: datashare.KindAdlsGen1Folder, + ADLSGen1FolderProperties: &datashare.ADLSGen1FolderProperties{ + AccountName: utils.String(d.Get("data_lake_store_name").(string)), + ResourceGroup: utils.String(d.Get("data_lake_store_resource_group_name").(string)), + SubscriptionID: utils.String(d.Get("data_lake_store_subscription_id").(string)), + FolderPath: utils.String(d.Get("folder_path").(string)), + }, + } + } + + if _, err := client.Create(ctx, shareId.ResourceGroup, shareId.AccountName, 
shareId.Name, name, dataSet); err != nil { + return fmt.Errorf("creating/updating DataShare DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name, err) + } + + resp, err := client.Get(ctx, shareId.ResourceGroup, shareId.AccountName, shareId.Name, name) + if err != nil { + return fmt.Errorf("retrieving DataShare DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name, err) + } + + respId := helper.GetAzurermDataShareDataSetId(resp.Value) + if respId == nil || *respId == "" { + return fmt.Errorf("empty or nil ID returned for DataShare DataSet %q (Resource Group %q / accountName %q / shareName %q)", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name) + } + + d.SetId(*respId) + return resourceArmDataShareDataSetDataLakeGen1Read(d, meta) +} + +func resourceArmDataShareDataSetDataLakeGen1Read(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).DataShare.DataSetClient + shareClient := meta.(*clients.Client).DataShare.SharesClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.DataShareDataSetID(d.Id()) + if err != nil { + return err + } + + resp, err := client.Get(ctx, id.ResourceGroup, id.AccountName, id.ShareName, id.Name) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + log.Printf("[INFO] DataShare %q does not exist - removing from state", d.Id()) + d.SetId("") + return nil + } + return fmt.Errorf("retrieving DataShare DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", id.Name, id.ResourceGroup, id.AccountName, id.ShareName, err) + } + d.Set("name", id.Name) + shareResp, err := shareClient.Get(ctx, id.ResourceGroup, id.AccountName, id.ShareName) + if err != nil { + return fmt.Errorf("retrieving DataShare %q (Resource Group %q / accountName %q): %+v", id.ShareName, 
id.ResourceGroup, id.AccountName, err) + } + if shareResp.ID == nil || *shareResp.ID == "" { + return fmt.Errorf("reading ID of DataShare %q (Resource Group %q / accountName %q): ID is empt", id.ShareName, id.ResourceGroup, id.AccountName) + } + d.Set("share_id", shareResp.ID) + + switch resp := resp.Value.(type) { + case datashare.ADLSGen1FileDataSet: + if props := resp.ADLSGen1FileProperties; props != nil { + d.Set("data_lake_store_name", props.AccountName) + d.Set("data_lake_store_resource_group_name", props.ResourceGroup) + d.Set("data_lake_store_subscription_id", props.SubscriptionID) + d.Set("folder_path", props.FolderPath) + d.Set("file_name", props.FileName) + d.Set("display_name", props.DataSetID) + } + + case datashare.ADLSGen1FolderDataSet: + if props := resp.ADLSGen1FolderProperties; props != nil { + d.Set("data_lake_store_name", props.AccountName) + d.Set("data_lake_store_resource_group_name", props.ResourceGroup) + d.Set("data_lake_store_subscription_id", props.SubscriptionID) + d.Set("folder_path", props.FolderPath) + d.Set("display_name", props.DataSetID) + } + + default: + return fmt.Errorf("data share dataset %q (Resource Group %q / accountName %q / shareName %q) is not a datalake store gen1 dataset", id.Name, id.ResourceGroup, id.AccountName, id.ShareName) + } + + return nil +} + +func resourceArmDataShareDataSetDataLakeGen1Delete(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).DataShare.DataSetClient + ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.DataShareDataSetID(d.Id()) + if err != nil { + return err + } + + if _, err := client.Delete(ctx, id.ResourceGroup, id.AccountName, id.ShareName, id.Name); err != nil { + return fmt.Errorf("deleting DataShare DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", id.Name, id.ResourceGroup, id.AccountName, id.ShareName, err) + } + return nil +} diff --git 
a/azurerm/internal/services/datashare/resource_arm_data_share_dataset_data_lake_gen2.go b/azurerm/internal/services/datashare/resource_arm_data_share_dataset_data_lake_gen2.go new file mode 100644 index 000000000000..22be191af012 --- /dev/null +++ b/azurerm/internal/services/datashare/resource_arm_data_share_dataset_data_lake_gen2.go @@ -0,0 +1,258 @@ +package datashare + +import ( + "fmt" + "log" + "time" + + "github.com/Azure/azure-sdk-for-go/services/datashare/mgmt/2019-11-01/datashare" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/helper" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/validate" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/storage" + azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func resourceArmDataShareDataSetDataLakeGen2() *schema.Resource { + return &schema.Resource{ + Create: resourceArmDataShareDataSetDataLakeGen2Create, + Read: resourceArmDataShareDataSetDataLakeGen2Read, + Delete: resourceArmDataShareDataSetDataLakeGen2Delete, + + Timeouts: &schema.ResourceTimeout{ + Create: schema.DefaultTimeout(30 * time.Minute), + Read: schema.DefaultTimeout(5 * time.Minute), + Delete: 
schema.DefaultTimeout(30 * time.Minute), + }, + + Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { + _, err := parse.DataShareDataSetID(id) + return err + }), + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validate.DatashareDataSetName(), + }, + + "share_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validate.DataShareID, + }, + + "storage_account_name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: storage.ValidateArmStorageAccountName, + }, + + "storage_account_resource_group_name": azure.SchemaResourceGroupName(), + + "storage_account_subscription_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.IsUUID, + }, + + "file_system_name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + + "file_path": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + ValidateFunc: validation.StringIsNotEmpty, + ConflictsWith: []string{"folder_path"}, + }, + + "folder_path": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + ValidateFunc: validation.StringIsNotEmpty, + ConflictsWith: []string{"file_path"}, + }, + + "display_name": { + Type: schema.TypeString, + Computed: true, + }, + }, + } +} +func resourceArmDataShareDataSetDataLakeGen2Create(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).DataShare.DataSetClient + ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) + defer cancel() + + name := d.Get("name").(string) + shareId, err := parse.DataShareID(d.Get("share_id").(string)) + if err != nil { + return err + } + + existing, err := client.Get(ctx, shareId.ResourceGroup, shareId.AccountName, shareId.Name, name) + if err != nil { + if !utils.ResponseWasNotFound(existing.Response) { + return 
fmt.Errorf("checking for present of existing DataShare DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name, err) + } + } + existingId := helper.GetAzurermDataShareDataSetId(existing.Value) + if existingId != nil && *existingId != "" { + return tf.ImportAsExistsError("azurerm_data_share_dataset_data_lake_gen2", *existingId) + } + + var dataSet datashare.BasicDataSet + + if filePath, ok := d.GetOk("file_path"); ok { + dataSet = datashare.ADLSGen2FileDataSet{ + Kind: datashare.KindAdlsGen2File, + ADLSGen2FileProperties: &datashare.ADLSGen2FileProperties{ + StorageAccountName: utils.String(d.Get("storage_account_name").(string)), + ResourceGroup: utils.String(d.Get("storage_account_resource_group_name").(string)), + SubscriptionID: utils.String(d.Get("storage_account_subscription_id").(string)), + FileSystem: utils.String(d.Get("file_system_name").(string)), + FilePath: utils.String(filePath.(string)), + }, + } + } else if folderPath, ok := d.GetOk("folder_path"); ok { + dataSet = datashare.ADLSGen2FolderDataSet{ + Kind: datashare.KindAdlsGen2Folder, + ADLSGen2FolderProperties: &datashare.ADLSGen2FolderProperties{ + StorageAccountName: utils.String(d.Get("storage_account_name").(string)), + ResourceGroup: utils.String(d.Get("storage_account_resource_group_name").(string)), + SubscriptionID: utils.String(d.Get("storage_account_subscription_id").(string)), + FileSystem: utils.String(d.Get("file_system_name").(string)), + FolderPath: utils.String(folderPath.(string)), + }, + } + } else { + dataSet = datashare.ADLSGen2FileSystemDataSet{ + Kind: datashare.KindAdlsGen2FileSystem, + ADLSGen2FileSystemProperties: &datashare.ADLSGen2FileSystemProperties{ + StorageAccountName: utils.String(d.Get("storage_account_name").(string)), + ResourceGroup: utils.String(d.Get("storage_account_resource_group_name").(string)), + SubscriptionID: utils.String(d.Get("storage_account_subscription_id").(string)), + 
FileSystem: utils.String(d.Get("file_system_name").(string)), + }, + } + } + + if _, err := client.Create(ctx, shareId.ResourceGroup, shareId.AccountName, shareId.Name, name, dataSet); err != nil { + return fmt.Errorf("creating DataShare DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name, err) + } + + resp, err := client.Get(ctx, shareId.ResourceGroup, shareId.AccountName, shareId.Name, name) + if err != nil { + return fmt.Errorf("retrieving DataShare DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name, err) + } + + respId := helper.GetAzurermDataShareDataSetId(resp.Value) + if respId == nil || *respId == "" { + return fmt.Errorf("empty or nil ID returned for DataShare DataSet %q (Resource Group %q / accountName %q / shareName %q)", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name) + } + + d.SetId(*respId) + return resourceArmDataShareDataSetDataLakeGen2Read(d, meta) +} + +func resourceArmDataShareDataSetDataLakeGen2Read(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).DataShare.DataSetClient + shareClient := meta.(*clients.Client).DataShare.SharesClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.DataShareDataSetID(d.Id()) + if err != nil { + return err + } + + resp, err := client.Get(ctx, id.ResourceGroup, id.AccountName, id.ShareName, id.Name) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + log.Printf("[INFO] DataShare %q does not exist - removing from state", d.Id()) + d.SetId("") + return nil + } + return fmt.Errorf("retrieving DataShare DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", id.Name, id.ResourceGroup, id.AccountName, id.ShareName, err) + } + d.Set("name", id.Name) + shareResp, err := shareClient.Get(ctx, id.ResourceGroup, 
id.AccountName, id.ShareName) + if err != nil { + return fmt.Errorf("retrieving DataShare %q (Resource Group %q / accountName %q): %+v", id.ShareName, id.ResourceGroup, id.AccountName, err) + } + if shareResp.ID == nil || *shareResp.ID == "" { + return fmt.Errorf("empty or nil ID returned for DataShare %q (Resource Group %q / accountName %q)", id.ShareName, id.ResourceGroup, id.AccountName) + } + d.Set("share_id", shareResp.ID) + + switch resp := resp.Value.(type) { + case datashare.ADLSGen2FileDataSet: + if props := resp.ADLSGen2FileProperties; props != nil { + d.Set("storage_account_name", props.StorageAccountName) + d.Set("storage_account_resource_group_name", props.ResourceGroup) + d.Set("storage_account_subscription_id", props.SubscriptionID) + d.Set("file_system_name", props.FileSystem) + d.Set("file_path", props.FilePath) + d.Set("display_name", props.DataSetID) + } + + case datashare.ADLSGen2FolderDataSet: + if props := resp.ADLSGen2FolderProperties; props != nil { + d.Set("storage_account_name", props.StorageAccountName) + d.Set("storage_account_resource_group_name", props.ResourceGroup) + d.Set("storage_account_subscription_id", props.SubscriptionID) + d.Set("file_system_name", props.FileSystem) + d.Set("folder_path", props.FolderPath) + d.Set("display_name", props.DataSetID) + } + + case datashare.ADLSGen2FileSystemDataSet: + if props := resp.ADLSGen2FileSystemProperties; props != nil { + d.Set("storage_account_name", props.StorageAccountName) + d.Set("storage_account_resource_group_name", props.ResourceGroup) + d.Set("storage_account_subscription_id", props.SubscriptionID) + d.Set("file_system_name", props.FileSystem) + d.Set("display_name", props.DataSetID) + } + + default: + return fmt.Errorf("data share dataset %q (Resource Group %q / accountName %q / shareName %q) is not a datalake store gen2 dataset", id.Name, id.ResourceGroup, id.AccountName, id.ShareName) + } + + return nil +} + +func resourceArmDataShareDataSetDataLakeGen2Delete(d 
*schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).DataShare.DataSetClient + ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.DataShareDataSetID(d.Id()) + if err != nil { + return err + } + + if _, err := client.Delete(ctx, id.ResourceGroup, id.AccountName, id.ShareName, id.Name); err != nil { + return fmt.Errorf("deleting DataShare DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", id.Name, id.ResourceGroup, id.AccountName, id.ShareName, err) + } + return nil +} diff --git a/azurerm/internal/services/datashare/resource_arm_data_share_dataset_kusto_cluster.go b/azurerm/internal/services/datashare/resource_arm_data_share_dataset_kusto_cluster.go new file mode 100644 index 000000000000..ba5546523728 --- /dev/null +++ b/azurerm/internal/services/datashare/resource_arm_data_share_dataset_kusto_cluster.go @@ -0,0 +1,178 @@ +package datashare + +import ( + "fmt" + "log" + "time" + + "github.com/Azure/azure-sdk-for-go/services/datashare/mgmt/2019-11-01/datashare" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/helper" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/validate" + azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func resourceArmDataShareDataSetKustoCluster() *schema.Resource { + return &schema.Resource{ + Create: resourceArmDataShareDataSetKustoClusterCreate, + Read: resourceArmDataShareDataSetKustoClusterRead, + Delete: resourceArmDataShareDataSetKustoClusterDelete, + + Timeouts: &schema.ResourceTimeout{ + Create: schema.DefaultTimeout(30 * time.Minute), + Read: schema.DefaultTimeout(5 * time.Minute), + Delete: schema.DefaultTimeout(30 * time.Minute), + }, + + Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { + _, err := parse.DataShareDataSetID(id) + return err + }), + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validate.DatashareDataSetName(), + }, + + "share_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validate.DataShareID, + }, + + "kusto_cluster_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: azure.ValidateResourceID, + }, + + "display_name": { + Type: schema.TypeString, + Computed: true, + }, + + "kusto_cluster_location": { + Type: schema.TypeString, + Computed: true, + }, + }, + } +} +func resourceArmDataShareDataSetKustoClusterCreate(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).DataShare.DataSetClient + ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) + defer cancel() + + name := d.Get("name").(string) + shareId, err := parse.DataShareID(d.Get("share_id").(string)) + if err != nil { + return err + } + + existing, err := client.Get(ctx, shareId.ResourceGroup, shareId.AccountName, shareId.Name, name) + if err != nil { + if !utils.ResponseWasNotFound(existing.Response) { + return fmt.Errorf("checking for present of existing DataShare Kusto Cluster DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", name, 
shareId.ResourceGroup, shareId.AccountName, shareId.Name, err) + } + } + existingId := helper.GetAzurermDataShareDataSetId(existing.Value) + if existingId != nil && *existingId != "" { + return tf.ImportAsExistsError("azurerm_data_share_dataset_kusto_cluster", *existingId) + } + + dataSet := datashare.KustoClusterDataSet{ + Kind: datashare.KindKustoCluster, + KustoClusterDataSetProperties: &datashare.KustoClusterDataSetProperties{ + KustoClusterResourceID: utils.String(d.Get("kusto_cluster_id").(string)), + }, + } + + if _, err := client.Create(ctx, shareId.ResourceGroup, shareId.AccountName, shareId.Name, name, dataSet); err != nil { + return fmt.Errorf("creating DataShare Kusto Cluster DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name, err) + } + + resp, err := client.Get(ctx, shareId.ResourceGroup, shareId.AccountName, shareId.Name, name) + if err != nil { + return fmt.Errorf("retrieving DataShare Kusto Cluster DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name, err) + } + + respId := helper.GetAzurermDataShareDataSetId(resp.Value) + if respId == nil || *respId == "" { + return fmt.Errorf("empty or nil ID returned for DataShare Kusto Cluster DataSet %q (Resource Group %q / accountName %q / shareName %q)", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name) + } + + d.SetId(*respId) + return resourceArmDataShareDataSetKustoClusterRead(d, meta) +} + +func resourceArmDataShareDataSetKustoClusterRead(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).DataShare.DataSetClient + shareClient := meta.(*clients.Client).DataShare.SharesClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.DataShareDataSetID(d.Id()) + if err != nil { + return err + } + + respModel, err := client.Get(ctx, id.ResourceGroup, 
id.AccountName, id.ShareName, id.Name) + if err != nil { + if utils.ResponseWasNotFound(respModel.Response) { + log.Printf("[INFO] DataShare %q does not exist - removing from state", d.Id()) + d.SetId("") + return nil + } + return fmt.Errorf("retrieving DataShare Kusto Cluster DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", id.Name, id.ResourceGroup, id.AccountName, id.ShareName, err) + } + + d.Set("name", id.Name) + shareResp, err := shareClient.Get(ctx, id.ResourceGroup, id.AccountName, id.ShareName) + if err != nil { + return fmt.Errorf("retrieving DataShare %q (Resource Group %q / accountName %q): %+v", id.ShareName, id.ResourceGroup, id.AccountName, err) + } + if shareResp.ID == nil || *shareResp.ID == "" { + return fmt.Errorf("empty or nil ID returned for DataShare %q (Resource Group %q / accountName %q)", id.ShareName, id.ResourceGroup, id.AccountName) + } + + d.Set("share_id", shareResp.ID) + + resp, ok := respModel.Value.AsKustoClusterDataSet() + if !ok { + return fmt.Errorf("dataShare dataset %q (Resource Group %q / accountName %q / shareName %q) is not kusto cluster dataset", id.Name, id.ResourceGroup, id.AccountName, id.ShareName) + } + if props := resp.KustoClusterDataSetProperties; props != nil { + d.Set("kusto_cluster_id", props.KustoClusterResourceID) + d.Set("display_name", props.DataSetID) + d.Set("kusto_cluster_location", props.Location) + } + + return nil +} + +func resourceArmDataShareDataSetKustoClusterDelete(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).DataShare.DataSetClient + ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.DataShareDataSetID(d.Id()) + if err != nil { + return err + } + + if _, err := client.Delete(ctx, id.ResourceGroup, id.AccountName, id.ShareName, id.Name); err != nil { + return fmt.Errorf("deleting DataShare Kusto Cluster DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", 
id.Name, id.ResourceGroup, id.AccountName, id.ShareName, err) + } + return nil +} diff --git a/azurerm/internal/services/datashare/resource_arm_data_share_dataset_kusto_database.go b/azurerm/internal/services/datashare/resource_arm_data_share_dataset_kusto_database.go new file mode 100644 index 000000000000..23b6264102b3 --- /dev/null +++ b/azurerm/internal/services/datashare/resource_arm_data_share_dataset_kusto_database.go @@ -0,0 +1,180 @@ +package datashare + +import ( + "fmt" + "log" + "time" + + "github.com/Azure/azure-sdk-for-go/services/datashare/mgmt/2019-11-01/datashare" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/helper" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/validate" + azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func resourceArmDataShareDataSetKustoDatabase() *schema.Resource { + return &schema.Resource{ + Create: resourceArmDataShareDataSetKustoDatabaseCreate, + Read: resourceArmDataShareDataSetKustoDatabaseRead, + Update: nil, + Delete: resourceArmDataShareDataSetKustoDatabaseDelete, + + Timeouts: &schema.ResourceTimeout{ + Create: schema.DefaultTimeout(30 * time.Minute), + Read: schema.DefaultTimeout(5 * time.Minute), + Delete: schema.DefaultTimeout(30 * time.Minute), + }, + + 
Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { + _, err := parse.DataShareDataSetID(id) + return err + }), + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validate.DatashareDataSetName(), + }, + + "share_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validate.DataShareID, + }, + + "kusto_database_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: azure.ValidateResourceID, + }, + + "display_name": { + Type: schema.TypeString, + Computed: true, + }, + + "kusto_cluster_location": { + Type: schema.TypeString, + Computed: true, + }, + }, + } +} + +func resourceArmDataShareDataSetKustoDatabaseCreate(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).DataShare.DataSetClient + ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) + defer cancel() + + name := d.Get("name").(string) + shareId, err := parse.DataShareID(d.Get("share_id").(string)) + if err != nil { + return err + } + + existing, err := client.Get(ctx, shareId.ResourceGroup, shareId.AccountName, shareId.Name, name) + if err != nil { + if !utils.ResponseWasNotFound(existing.Response) { + return fmt.Errorf("checking for present of existing DataShare Kusto Database DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name, err) + } + } + existingId := helper.GetAzurermDataShareDataSetId(existing.Value) + if existingId != nil && *existingId != "" { + return tf.ImportAsExistsError("azurerm_data_share_dataset_kusto_database", *existingId) + } + + dataSet := datashare.KustoDatabaseDataSet{ + Kind: datashare.KindKustoDatabase, + KustoDatabaseDataSetProperties: &datashare.KustoDatabaseDataSetProperties{ + KustoDatabaseResourceID: utils.String(d.Get("kusto_database_id").(string)), + }, + } + + if _, err := 
client.Create(ctx, shareId.ResourceGroup, shareId.AccountName, shareId.Name, name, dataSet); err != nil { + return fmt.Errorf("creating DataShare Kusto Database DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name, err) + } + + resp, err := client.Get(ctx, shareId.ResourceGroup, shareId.AccountName, shareId.Name, name) + if err != nil { + return fmt.Errorf("retrieving DataShare Kusto Database DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name, err) + } + + respId := helper.GetAzurermDataShareDataSetId(resp.Value) + if respId == nil || *respId == "" { + return fmt.Errorf("empty or nil ID returned for DataShare Kusto Database DataSet %q (Resource Group %q / accountName %q / shareName %q)", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name) + } + + d.SetId(*respId) + return resourceArmDataShareDataSetKustoDatabaseRead(d, meta) +} + +func resourceArmDataShareDataSetKustoDatabaseRead(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).DataShare.DataSetClient + shareClient := meta.(*clients.Client).DataShare.SharesClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.DataShareDataSetID(d.Id()) + if err != nil { + return err + } + + respModel, err := client.Get(ctx, id.ResourceGroup, id.AccountName, id.ShareName, id.Name) + if err != nil { + if utils.ResponseWasNotFound(respModel.Response) { + log.Printf("[INFO] DataShare %q does not exist - removing from state", d.Id()) + d.SetId("") + return nil + } + return fmt.Errorf("retrieving DataShare Kusto Database DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", id.Name, id.ResourceGroup, id.AccountName, id.ShareName, err) + } + + d.Set("name", id.Name) + shareResp, err := shareClient.Get(ctx, id.ResourceGroup, id.AccountName, id.ShareName) 
+ if err != nil { + return fmt.Errorf("retrieving DataShare %q (Resource Group %q / accountName %q): %+v", id.ShareName, id.ResourceGroup, id.AccountName, err) + } + if shareResp.ID == nil || *shareResp.ID == "" { + return fmt.Errorf("empty or nil ID returned for DataShare %q (Resource Group %q / accountName %q)", id.ShareName, id.ResourceGroup, id.AccountName) + } + + d.Set("share_id", shareResp.ID) + + resp, ok := respModel.Value.AsKustoDatabaseDataSet() + if !ok { + return fmt.Errorf("dataShare %q (Resource Group %q / accountName %q) is not Kusto Database DataSet", id.ShareName, id.ResourceGroup, id.AccountName) + } + if props := resp.KustoDatabaseDataSetProperties; props != nil { + d.Set("kusto_database_id", props.KustoDatabaseResourceID) + d.Set("display_name", props.DataSetID) + d.Set("kusto_cluster_location", props.Location) + } + + return nil +} + +func resourceArmDataShareDataSetKustoDatabaseDelete(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).DataShare.DataSetClient + ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.DataShareDataSetID(d.Id()) + if err != nil { + return err + } + + if _, err := client.Delete(ctx, id.ResourceGroup, id.AccountName, id.ShareName, id.Name); err != nil { + return fmt.Errorf("deleting DataShare Kusto Database DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", id.Name, id.ResourceGroup, id.AccountName, id.ShareName, err) + } + return nil +} diff --git a/azurerm/internal/services/datashare/tests/data_source_data_share_dataset_blob_storage_test.go b/azurerm/internal/services/datashare/tests/data_source_data_share_dataset_blob_storage_test.go new file mode 100644 index 000000000000..fadc61fa8246 --- /dev/null +++ b/azurerm/internal/services/datashare/tests/data_source_data_share_dataset_blob_storage_test.go @@ -0,0 +1,45 @@ +package tests + +import ( + "fmt" + "testing" + + 
"github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" +) + +func TestAccDataSourceAzureRMDataShareDatasetBlobStorage_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_data_share_dataset_blob_storage", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMDataShareDataSetDestroy("azurerm_data_share_dataset_blob_storage"), + Steps: []resource.TestStep{ + { + Config: testAccDataSourceDataShareDatasetBlobStorage_basic(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMDataShareDataSetExists(data.ResourceName), + resource.TestCheckResourceAttrSet(data.ResourceName, "container_name"), + resource.TestCheckResourceAttrSet(data.ResourceName, "storage_account_name"), + resource.TestCheckResourceAttrSet(data.ResourceName, "storage_account_resource_group_name"), + resource.TestCheckResourceAttrSet(data.ResourceName, "storage_account_subscription_id"), + resource.TestCheckResourceAttrSet(data.ResourceName, "file_path"), + resource.TestCheckResourceAttrSet(data.ResourceName, "display_name"), + ), + }, + }, + }) +} + +func testAccDataSourceDataShareDatasetBlobStorage_basic(data acceptance.TestData) string { + config := testAccAzureRMDataShareDataSetBlobStorageFile_basic(data) + return fmt.Sprintf(` +%s + +data "azurerm_data_share_dataset_blob_storage" "test" { + name = azurerm_data_share_dataset_blob_storage.test.name + share_id = azurerm_data_share_dataset_blob_storage.test.share_id +} +`, config) +} diff --git a/azurerm/internal/services/datashare/tests/data_source_data_share_dataset_data_lake_gen1_test.go b/azurerm/internal/services/datashare/tests/data_source_data_share_dataset_data_lake_gen1_test.go new file mode 100644 index 000000000000..0fe5427d5b99 --- /dev/null +++ 
b/azurerm/internal/services/datashare/tests/data_source_data_share_dataset_data_lake_gen1_test.go @@ -0,0 +1,44 @@ +package tests + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" +) + +func TestAccDataSourceAzureRMDataShareDatasetDataLakeGen1_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_data_share_dataset_data_lake_gen1", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMDataShareDataSetDestroy("azurerm_data_share_dataset_data_lake_gen1"), + Steps: []resource.TestStep{ + { + Config: testAccDataSourceDataShareDatasetDataLakeGen1_basic(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMDataShareDataSetExists(data.ResourceName), + resource.TestCheckResourceAttrSet(data.ResourceName, "data_lake_store_name"), + resource.TestCheckResourceAttrSet(data.ResourceName, "data_lake_store_resource_group_name"), + resource.TestCheckResourceAttrSet(data.ResourceName, "data_lake_store_subscription_id"), + resource.TestCheckResourceAttrSet(data.ResourceName, "file_name"), + resource.TestCheckResourceAttrSet(data.ResourceName, "display_name"), + ), + }, + }, + }) +} + +func testAccDataSourceDataShareDatasetDataLakeGen1_basic(data acceptance.TestData) string { + config := testAccAzureRMDataShareDataSetDataLakeGen1File_basic(data) + return fmt.Sprintf(` +%s + +data "azurerm_data_share_dataset_data_lake_gen1" "test" { + name = azurerm_data_share_dataset_data_lake_gen1.test.name + share_id = azurerm_data_share_dataset_data_lake_gen1.test.share_id +} +`, config) +} diff --git a/azurerm/internal/services/datashare/tests/data_source_data_share_dataset_data_lake_gen2_test.go b/azurerm/internal/services/datashare/tests/data_source_data_share_dataset_data_lake_gen2_test.go new file 
mode 100644 index 000000000000..de04a5a82073 --- /dev/null +++ b/azurerm/internal/services/datashare/tests/data_source_data_share_dataset_data_lake_gen2_test.go @@ -0,0 +1,45 @@ +package tests + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" +) + +func TestAccDataSourceAzureRMDataShareDatasetDataLakeGen2_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_data_share_dataset_data_lake_gen2", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMDataShareDataSetDestroy("azurerm_data_share_dataset_data_lake_gen2"), + Steps: []resource.TestStep{ + { + Config: testAccDataSourceDataShareDatasetDataLakeGen2_basic(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMDataShareDataSetExists(data.ResourceName), + resource.TestCheckResourceAttrSet(data.ResourceName, "storage_account_name"), + resource.TestCheckResourceAttrSet(data.ResourceName, "storage_account_resource_group_name"), + resource.TestCheckResourceAttrSet(data.ResourceName, "storage_account_subscription_id"), + resource.TestCheckResourceAttrSet(data.ResourceName, "file_system_name"), + resource.TestCheckResourceAttrSet(data.ResourceName, "file_path"), + resource.TestCheckResourceAttrSet(data.ResourceName, "display_name"), + ), + }, + }, + }) +} + +func testAccDataSourceDataShareDatasetDataLakeGen2_basic(data acceptance.TestData) string { + config := testAccAzureRMDataShareDataSetDataLakeGen2File_basic(data) + return fmt.Sprintf(` +%s + +data "azurerm_data_share_dataset_data_lake_gen2" "test" { + name = azurerm_data_share_dataset_data_lake_gen2.test.name + share_id = azurerm_data_share_dataset_data_lake_gen2.test.share_id +} +`, config) +} diff --git 
a/azurerm/internal/services/datashare/tests/data_source_data_share_dataset_kusto_cluster_test.go b/azurerm/internal/services/datashare/tests/data_source_data_share_dataset_kusto_cluster_test.go new file mode 100644 index 000000000000..1371bd29829a --- /dev/null +++ b/azurerm/internal/services/datashare/tests/data_source_data_share_dataset_kusto_cluster_test.go @@ -0,0 +1,42 @@ +package tests + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" +) + +func TestAccDataSourceAzureRMDataShareDatasetKustoCluster_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_data_share_dataset_kusto_cluster", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMDataShareDataSetDestroy("azurerm_data_share_dataset_kusto_cluster"), + Steps: []resource.TestStep{ + { + Config: testAccDataSourceDataShareDatasetKustoCluster_basic(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMDataShareDataSetExists(data.ResourceName), + resource.TestCheckResourceAttrSet(data.ResourceName, "kusto_cluster_id"), + resource.TestCheckResourceAttrSet(data.ResourceName, "display_name"), + resource.TestCheckResourceAttrSet(data.ResourceName, "kusto_cluster_location"), + ), + }, + }, + }) +} + +func testAccDataSourceDataShareDatasetKustoCluster_basic(data acceptance.TestData) string { + config := testAccAzureRMDataShareDataSetKustoCluster_basic(data) + return fmt.Sprintf(` +%s + +data "azurerm_data_share_dataset_kusto_cluster" "test" { + name = azurerm_data_share_dataset_kusto_cluster.test.name + share_id = azurerm_data_share_dataset_kusto_cluster.test.share_id +} +`, config) +} diff --git a/azurerm/internal/services/datashare/tests/data_source_data_share_dataset_kusto_database_test.go 
b/azurerm/internal/services/datashare/tests/data_source_data_share_dataset_kusto_database_test.go new file mode 100644 index 000000000000..c67e6d7700b5 --- /dev/null +++ b/azurerm/internal/services/datashare/tests/data_source_data_share_dataset_kusto_database_test.go @@ -0,0 +1,42 @@ +package tests + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" +) + +func TestAccDataSourceAzureRMDataShareDatasetKustoDatabase_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_data_share_dataset_kusto_database", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMDataShareDataSetDestroy("azurerm_data_share_dataset_kusto_database"), + Steps: []resource.TestStep{ + { + Config: testAccDataSourceDataShareDatasetKustoDatabase_basic(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMDataShareDataSetExists(data.ResourceName), + resource.TestCheckResourceAttrSet(data.ResourceName, "kusto_database_id"), + resource.TestCheckResourceAttrSet(data.ResourceName, "display_name"), + resource.TestCheckResourceAttrSet(data.ResourceName, "kusto_cluster_location"), + ), + }, + }, + }) +} + +func testAccDataSourceDataShareDatasetKustoDatabase_basic(data acceptance.TestData) string { + config := testAccAzureRMDataShareDataSetKustoDatabase_basic(data) + return fmt.Sprintf(` +%s + +data "azurerm_data_share_dataset_kusto_database" "test" { + name = azurerm_data_share_dataset_kusto_database.test.name + share_id = azurerm_data_share_dataset_kusto_database.test.share_id +} +`, config) +} diff --git a/azurerm/internal/services/datashare/tests/resource_arm_data_share_dataset_blob_storage_test.go b/azurerm/internal/services/datashare/tests/resource_arm_data_share_dataset_blob_storage_test.go new file mode 
100644 index 000000000000..ae91276975f5 --- /dev/null +++ b/azurerm/internal/services/datashare/tests/resource_arm_data_share_dataset_blob_storage_test.go @@ -0,0 +1,269 @@ +package tests + +import ( + "fmt" + "os" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func TestAccAzureRMDataShareDataSetBlobStorageFile_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_share_dataset_blob_storage", "test") + + resource.Test(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMDataShareDataSetDestroy("azurerm_data_share_dataset_blob_storage"), + Steps: []resource.TestStep{ + { + Config: testAccAzureRMDataShareDataSetBlobStorageFile_basic(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMDataShareDataSetExists(data.ResourceName), + resource.TestCheckResourceAttrSet(data.ResourceName, "display_name"), + ), + }, + data.ImportStep(), + }, + }) +} + +func TestAccAzureRMDataShareDataSetBlobStorageFolder_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_share_dataset_blob_storage", "test") + + resource.Test(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMDataShareDataSetDestroy("azurerm_data_share_dataset_blob_storage"), + Steps: []resource.TestStep{ + { + Config: testAccAzureRMDataShareDataSetBlobStorageFolder_basic(data), + Check: resource.ComposeTestCheckFunc( + 
testCheckAzureRMDataShareDataSetExists(data.ResourceName), + resource.TestCheckResourceAttrSet(data.ResourceName, "display_name"), + ), + }, + data.ImportStep(), + }, + }) +} + +func TestAccAzureRMDataShareDataSetBlobStorageContainer_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_share_dataset_blob_storage", "test") + + resource.Test(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMDataShareDataSetDestroy("azurerm_data_share_dataset_blob_storage"), + Steps: []resource.TestStep{ + { + Config: testAccAzureRMDataShareDataSetBlobStorageContainer_basic(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMDataShareDataSetExists(data.ResourceName), + resource.TestCheckResourceAttrSet(data.ResourceName, "display_name"), + ), + }, + data.ImportStep(), + }, + }) +} + +func TestAccAzureRMDataShareDataSetBlobStorage_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_share_dataset_blob_storage", "test") + + resource.Test(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMDataShareDataSetDestroy("azurerm_data_share_dataset_blob_storage"), + Steps: []resource.TestStep{ + { + Config: testAccAzureRMDataShareDataSetBlobStorageFile_basic(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMDataShareDataSetExists(data.ResourceName), + ), + }, + data.RequiresImportErrorStep(testAccAzureRMDataShareDataSetBlobStorage_requiresImport), + }, + }) +} + +func testCheckAzureRMDataShareDataSetExists(resourceName string) resource.TestCheckFunc { + return func(s *terraform.State) error { + client := acceptance.AzureProvider.Meta().(*clients.Client).DataShare.DataSetClient + ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext + rs, ok := s.RootModule().Resources[resourceName] + if !ok { + return 
fmt.Errorf("DataShare DataSet not found: %s", resourceName) + } + id, err := parse.DataShareDataSetID(rs.Primary.ID) + if err != nil { + return err + } + if resp, err := client.Get(ctx, id.ResourceGroup, id.AccountName, id.ShareName, id.Name); err != nil { + if !utils.ResponseWasNotFound(resp.Response) { + return fmt.Errorf("bad: data share data set %q does not exist", id.Name) + } + return fmt.Errorf("bad: Get on DataShare DataSet Client: %+v", err) + } + return nil + } +} + +func testCheckAzureRMDataShareDataSetDestroy(resourceTypeName string) func(s *terraform.State) error { + return func(s *terraform.State) error { + client := acceptance.AzureProvider.Meta().(*clients.Client).DataShare.DataSetClient + ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext + + for _, rs := range s.RootModule().Resources { + if rs.Type != resourceTypeName { + continue + } + id, err := parse.DataShareDataSetID(rs.Primary.ID) + if err != nil { + return err + } + if resp, err := client.Get(ctx, id.ResourceGroup, id.AccountName, id.ShareName, id.Name); err != nil { + if !utils.ResponseWasNotFound(resp.Response) { + return fmt.Errorf("bad: get on data share data set client: %+v", err) + } + } + return nil + } + return nil + } +} + +func testAccAzureRMDataShareDataSetBlobStorage_template(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +provider "azuread" { +} + +resource "azurerm_resource_group" "test" { + name = "acctest-datashare-%[1]d" + location = "%[2]s" +} + +resource "azurerm_data_share_account" "test" { + name = "acctest-dsa-%[1]d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + identity { + type = "SystemAssigned" + } +} + +resource "azurerm_data_share" "test" { + name = "acctest_ds_%[1]d" + account_id = azurerm_data_share_account.test.id + kind = "CopyBased" +} + +resource "azurerm_storage_account" "test" { + name = "acctest%[3]d" + resource_group_name 
= azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + account_tier = "Standard" + account_replication_type = "RAGRS" +} + +resource "azurerm_storage_container" "test" { + name = "acctest-sc-%[1]d" + storage_account_name = azurerm_storage_account.test.name + container_access_type = "container" +} + +data "azuread_service_principal" "test" { + display_name = azurerm_data_share_account.test.name +} + +resource "azurerm_role_assignment" "test" { + scope = azurerm_storage_account.test.id + role_definition_name = "Storage Blob Data Reader" + principal_id = data.azuread_service_principal.test.object_id +} +`, data.RandomInteger, data.Locations.Primary, data.RandomIntOfLength(12)) +} + +func testAccAzureRMDataShareDataSetBlobStorageFile_basic(data acceptance.TestData) string { + config := testAccAzureRMDataShareDataSetBlobStorage_template(data) + return fmt.Sprintf(` +%[1]s + +resource "azurerm_data_share_dataset_blob_storage" "test" { + name = "acctest-dsbds-file-%[2]d" + share_id = azurerm_data_share.test.id + container_name = azurerm_storage_container.test.name + storage_account_name = azurerm_storage_account.test.name + storage_account_resource_group_name = azurerm_storage_account.test.resource_group_name + storage_account_subscription_id = "%[3]s" + file_path = "myfile.txt" + depends_on = [ + azurerm_role_assignment.test, + ] +} +`, config, data.RandomInteger, os.Getenv("ARM_SUBSCRIPTION_ID")) +} + +func testAccAzureRMDataShareDataSetBlobStorageFolder_basic(data acceptance.TestData) string { + config := testAccAzureRMDataShareDataSetBlobStorage_template(data) + return fmt.Sprintf(` +%[1]s + +resource "azurerm_data_share_dataset_blob_storage" "test" { + name = "acctest-dsbds-folder-%[2]d" + share_id = azurerm_data_share.test.id + container_name = azurerm_storage_container.test.name + storage_account_name = azurerm_storage_account.test.name + storage_account_resource_group_name = azurerm_storage_account.test.resource_group_name + 
storage_account_subscription_id = "%[3]s" + folder_path = "/test/" + depends_on = [ + azurerm_role_assignment.test, + ] +} +`, config, data.RandomInteger, os.Getenv("ARM_SUBSCRIPTION_ID")) +} + +func testAccAzureRMDataShareDataSetBlobStorageContainer_basic(data acceptance.TestData) string { + config := testAccAzureRMDataShareDataSetBlobStorage_template(data) + return fmt.Sprintf(` +%[1]s + +resource "azurerm_data_share_dataset_blob_storage" "test" { + name = "acctest-dsbds-folder-%[2]d" + share_id = azurerm_data_share.test.id + container_name = azurerm_storage_container.test.name + storage_account_name = azurerm_storage_account.test.name + storage_account_resource_group_name = azurerm_storage_account.test.resource_group_name + storage_account_subscription_id = "%[3]s" + depends_on = [ + azurerm_role_assignment.test, + ] +} +`, config, data.RandomInteger, os.Getenv("ARM_SUBSCRIPTION_ID")) +} + +func testAccAzureRMDataShareDataSetBlobStorage_requiresImport(data acceptance.TestData) string { + config := testAccAzureRMDataShareDataSetBlobStorageFile_basic(data) + return fmt.Sprintf(` +%s + +resource "azurerm_data_share_dataset_blob_storage" "import" { + name = azurerm_data_share_dataset_blob_storage.test.name + share_id = azurerm_data_share.test.id + container_name = azurerm_data_share_dataset_blob_storage.test.container_name + storage_account_name = azurerm_data_share_dataset_blob_storage.test.storage_account_name + storage_account_resource_group_name = azurerm_data_share_dataset_blob_storage.test.storage_account_resource_group_name + storage_account_subscription_id = azurerm_data_share_dataset_blob_storage.test.storage_account_subscription_id +} +`, config) +} diff --git a/azurerm/internal/services/datashare/tests/resource_arm_data_share_dataset_data_lake_gen1_test.go b/azurerm/internal/services/datashare/tests/resource_arm_data_share_dataset_data_lake_gen1_test.go new file mode 100644 index 000000000000..445ecc84fbe7 --- /dev/null +++ 
b/azurerm/internal/services/datashare/tests/resource_arm_data_share_dataset_data_lake_gen1_test.go @@ -0,0 +1,181 @@ +package tests + +import ( + "fmt" + "os" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" +) + +func TestAccAzureRMDataShareDataSetDataLakeGen1File_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_share_dataset_data_lake_gen1", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMDataShareDataSetDestroy("azurerm_data_share_dataset_data_lake_gen1"), + Steps: []resource.TestStep{ + { + Config: testAccAzureRMDataShareDataSetDataLakeGen1File_basic(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMDataShareDataSetExists(data.ResourceName), + resource.TestCheckResourceAttrSet(data.ResourceName, "display_name"), + ), + }, + data.ImportStep(), + }, + }) +} + +func TestAccAzureRMDataShareDataSetDataLakeGen1Folder_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_share_dataset_data_lake_gen1", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMDataShareDataSetDestroy("azurerm_data_share_dataset_data_lake_gen1"), + Steps: []resource.TestStep{ + { + Config: testAccAzureRMDataShareDataSetDataLakeGen1Folder_basic(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMDataShareDataSetExists(data.ResourceName), + resource.TestCheckResourceAttrSet(data.ResourceName, "display_name"), + ), + }, + data.ImportStep(), + }, + }) +} + +func TestAccAzureRMDataShareDataSetDataLakeGen1_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_share_dataset_data_lake_gen1", "test") + + resource.ParallelTest(t, 
resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMDataShareDataSetDestroy("azurerm_data_share_dataset_data_lake_gen1"), + Steps: []resource.TestStep{ + { + Config: testAccAzureRMDataShareDataSetDataLakeGen1File_basic(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMDataShareDataSetExists(data.ResourceName), + ), + }, + data.RequiresImportErrorStep(testAccAzureRMDataShareDataSetDataLakeGen1_requiresImport), + }, + }) +} + +func testAccAzureRMDataShareDataSetDataLakeGen1_template(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +provider "azuread" { +} + +resource "azurerm_resource_group" "test" { + name = "acctest-datashare-%[1]d" + location = "%[2]s" +} + +resource "azurerm_data_share_account" "test" { + name = "acctest-dsa-%[1]d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + identity { + type = "SystemAssigned" + } + + tags = { + env = "Test" + } +} + +resource "azurerm_data_share" "test" { + name = "acctest_ds_%[1]d" + account_id = azurerm_data_share_account.test.id + kind = "CopyBased" +} + +resource "azurerm_data_lake_store" "test" { + name = "acctestdls%[3]d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + firewall_state = "Disabled" +} + +resource "azurerm_data_lake_store_file" "test" { + account_name = azurerm_data_lake_store.test.name + local_file_path = "./testdata/application_gateway_test.cer" + remote_file_path = "/test/application_gateway_test.cer" +} + +data "azuread_service_principal" "test" { + display_name = azurerm_data_share_account.test.name +} + +resource "azurerm_role_assignment" "test" { + scope = azurerm_data_lake_store.test.id + role_definition_name = "Owner" + principal_id = data.azuread_service_principal.test.object_id +} +`, data.RandomInteger, 
data.Locations.Primary, data.RandomIntOfLength(12)) +} + +func testAccAzureRMDataShareDataSetDataLakeGen1File_basic(data acceptance.TestData) string { + config := testAccAzureRMDataShareDataSetDataLakeGen1_template(data) + return fmt.Sprintf(` +%s + +resource "azurerm_data_share_dataset_data_lake_gen1" "test" { + name = "acctest-dlds-%d" + share_id = azurerm_data_share.test.id + data_lake_store_name = azurerm_data_lake_store.test.name + data_lake_store_resource_group_name = azurerm_data_lake_store.test.resource_group_name + data_lake_store_subscription_id = "%s" + file_name = "application_gateway_test.cer" + folder_path = "/test/" + depends_on = [ + azurerm_role_assignment.test, + ] +} +`, config, data.RandomInteger, os.Getenv("ARM_SUBSCRIPTION_ID")) +} + +func testAccAzureRMDataShareDataSetDataLakeGen1Folder_basic(data acceptance.TestData) string { + config := testAccAzureRMDataShareDataSetDataLakeGen1_template(data) + return fmt.Sprintf(` +%s + +resource "azurerm_data_share_dataset_data_lake_gen1" "test" { + name = "acctest-dlds-%d" + share_id = azurerm_data_share.test.id + data_lake_store_name = azurerm_data_lake_store.test.name + data_lake_store_resource_group_name = azurerm_data_lake_store.test.resource_group_name + data_lake_store_subscription_id = "%s" + folder_path = "/test/" + depends_on = [ + azurerm_role_assignment.test, + ] +} +`, config, data.RandomInteger, os.Getenv("ARM_SUBSCRIPTION_ID")) +} + +func testAccAzureRMDataShareDataSetDataLakeGen1_requiresImport(data acceptance.TestData) string { + config := testAccAzureRMDataShareDataSetDataLakeGen1File_basic(data) + return fmt.Sprintf(` +%s +resource "azurerm_data_share_dataset_data_lake_gen1" "import" { + name = azurerm_data_share_dataset_data_lake_gen1.test.name + share_id = azurerm_data_share.test.id + data_lake_store_name = azurerm_data_share_dataset_data_lake_gen1.test.data_lake_store_name + data_lake_store_resource_group_name = 
azurerm_data_share_dataset_data_lake_gen1.test.data_lake_store_resource_group_name + data_lake_store_subscription_id = azurerm_data_share_dataset_data_lake_gen1.test.data_lake_store_subscription_id + folder_path = azurerm_data_share_dataset_data_lake_gen1.test.folder_path +} +`, config) +} diff --git a/azurerm/internal/services/datashare/tests/resource_arm_data_share_dataset_data_lake_gen2_test.go b/azurerm/internal/services/datashare/tests/resource_arm_data_share_dataset_data_lake_gen2_test.go new file mode 100644 index 000000000000..ae4551a137b4 --- /dev/null +++ b/azurerm/internal/services/datashare/tests/resource_arm_data_share_dataset_data_lake_gen2_test.go @@ -0,0 +1,220 @@ +package tests + +import ( + "fmt" + "os" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" +) + +func TestAccAzureRMDataShareDataSetDataLakeGen2File_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_share_dataset_data_lake_gen2", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMDataShareDataSetDestroy("azurerm_data_share_dataset_data_lake_gen2"), + Steps: []resource.TestStep{ + { + Config: testAccAzureRMDataShareDataSetDataLakeGen2File_basic(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMDataShareDataSetExists(data.ResourceName), + resource.TestCheckResourceAttrSet(data.ResourceName, "display_name"), + ), + }, + data.ImportStep(), + }, + }) +} + +func TestAccAzureRMDataShareDataSetDataLakeGen2Folder_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_share_dataset_data_lake_gen2", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: 
testCheckAzureRMDataShareDataSetDestroy("azurerm_data_share_dataset_data_lake_gen2"), + Steps: []resource.TestStep{ + { + Config: testAccAzureRMDataShareDataSetDataLakeGen2Folder_basic(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMDataShareDataSetExists(data.ResourceName), + resource.TestCheckResourceAttrSet(data.ResourceName, "display_name"), + ), + }, + data.ImportStep(), + }, + }) +} + +func TestAccAzureRMDataShareDataSetDataLakeGen2FileSystem_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_share_dataset_data_lake_gen2", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMDataShareDataSetDestroy("azurerm_data_share_dataset_data_lake_gen2"), + Steps: []resource.TestStep{ + { + Config: testAccAzureRMDataShareDataSetDataLakeGen2FileSystem_basic(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMDataShareDataSetExists(data.ResourceName), + resource.TestCheckResourceAttrSet(data.ResourceName, "display_name"), + ), + }, + data.ImportStep(), + }, + }) +} + +func TestAccAzureRMDataShareDataLakeGen2DataSet_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_share_dataset_data_lake_gen2", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMDataShareDataSetDestroy("azurerm_data_share_dataset_data_lake_gen2"), + Steps: []resource.TestStep{ + { + Config: testAccAzureRMDataShareDataSetDataLakeGen2File_basic(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMDataShareDataSetExists(data.ResourceName), + ), + }, + data.RequiresImportErrorStep(testAccAzureRMDataShareDataLakeGen2DataSet_requiresImport), + }, + }) +} + +func testAccAzureRMDataShareDataLakeGen2DataSet_template(data acceptance.TestData) string { + return 
fmt.Sprintf(` +provider "azurerm" { + features {} +} + +provider "azuread" { +} + +resource "azurerm_resource_group" "test" { + name = "acctest-datashare-%[1]d" + location = "%[2]s" +} + +resource "azurerm_data_share_account" "test" { + name = "acctest-dsa-%[1]d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + identity { + type = "SystemAssigned" + } +} + +resource "azurerm_data_share" "test" { + name = "acctest_ds_%[1]d" + account_id = azurerm_data_share_account.test.id + kind = "CopyBased" +} + +resource "azurerm_storage_account" "test" { + name = "accteststr%[3]d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + account_kind = "BlobStorage" + account_tier = "Standard" + account_replication_type = "LRS" +} + +resource "azurerm_storage_data_lake_gen2_filesystem" "test" { + name = "acctest-%[1]d" + storage_account_id = azurerm_storage_account.test.id +} + +data "azuread_service_principal" "test" { + display_name = azurerm_data_share_account.test.name +} + +resource "azurerm_role_assignment" "test" { + scope = azurerm_storage_account.test.id + role_definition_name = "Storage Blob Data Reader" + principal_id = data.azuread_service_principal.test.object_id +} +`, data.RandomInteger, data.Locations.Primary, data.RandomIntOfLength(12)) +} + +func testAccAzureRMDataShareDataSetDataLakeGen2File_basic(data acceptance.TestData) string { + config := testAccAzureRMDataShareDataLakeGen2DataSet_template(data) + return fmt.Sprintf(` +%s + +resource "azurerm_data_share_dataset_data_lake_gen2" "test" { + name = "acctest-dlds-%d" + share_id = azurerm_data_share.test.id + storage_account_name = azurerm_storage_account.test.name + storage_account_resource_group_name = azurerm_storage_account.test.resource_group_name + storage_account_subscription_id = "%s" + file_system_name = azurerm_storage_data_lake_gen2_filesystem.test.name + file_path = "myfile.txt" + 
depends_on = [ + azurerm_role_assignment.test, + ] +} +`, config, data.RandomInteger, os.Getenv("ARM_SUBSCRIPTION_ID")) +} + +func testAccAzureRMDataShareDataSetDataLakeGen2Folder_basic(data acceptance.TestData) string { + config := testAccAzureRMDataShareDataLakeGen2DataSet_template(data) + return fmt.Sprintf(` +%s + +resource "azurerm_data_share_dataset_data_lake_gen2" "test" { + name = "acctest-dlds-%d" + share_id = azurerm_data_share.test.id + storage_account_name = azurerm_storage_account.test.name + storage_account_resource_group_name = azurerm_storage_account.test.resource_group_name + storage_account_subscription_id = "%s" + file_system_name = azurerm_storage_data_lake_gen2_filesystem.test.name + folder_path = "/test/" + depends_on = [ + azurerm_role_assignment.test, + ] +} +`, config, data.RandomInteger, os.Getenv("ARM_SUBSCRIPTION_ID")) +} + +func testAccAzureRMDataShareDataSetDataLakeGen2FileSystem_basic(data acceptance.TestData) string { + config := testAccAzureRMDataShareDataLakeGen2DataSet_template(data) + return fmt.Sprintf(` +%s + +resource "azurerm_data_share_dataset_data_lake_gen2" "test" { + name = "acctest-dlds-%d" + share_id = azurerm_data_share.test.id + storage_account_name = azurerm_storage_account.test.name + storage_account_resource_group_name = azurerm_storage_account.test.resource_group_name + storage_account_subscription_id = "%s" + file_system_name = azurerm_storage_data_lake_gen2_filesystem.test.name + depends_on = [ + azurerm_role_assignment.test, + ] +} +`, config, data.RandomInteger, os.Getenv("ARM_SUBSCRIPTION_ID")) +} + +func testAccAzureRMDataShareDataLakeGen2DataSet_requiresImport(data acceptance.TestData) string { + config := testAccAzureRMDataShareDataSetDataLakeGen2File_basic(data) + return fmt.Sprintf(` +%s + +resource "azurerm_data_share_dataset_data_lake_gen2" "import" { + name = azurerm_data_share_dataset_data_lake_gen2.test.name + share_id = azurerm_data_share.test.id + storage_account_name = 
azurerm_data_share_dataset_data_lake_gen2.test.storage_account_name + storage_account_resource_group_name = azurerm_data_share_dataset_data_lake_gen2.test.storage_account_resource_group_name + storage_account_subscription_id = azurerm_data_share_dataset_data_lake_gen2.test.storage_account_subscription_id + file_system_name = azurerm_data_share_dataset_data_lake_gen2.test.file_system_name + file_path = azurerm_data_share_dataset_data_lake_gen2.test.file_path +} +`, config) +} diff --git a/azurerm/internal/services/datashare/tests/resource_arm_data_share_dataset_kusto_cluster_test.go b/azurerm/internal/services/datashare/tests/resource_arm_data_share_dataset_kusto_cluster_test.go new file mode 100644 index 000000000000..f701120b1a48 --- /dev/null +++ b/azurerm/internal/services/datashare/tests/resource_arm_data_share_dataset_kusto_cluster_test.go @@ -0,0 +1,130 @@ +package tests + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" +) + +func TestAccAzureRMDataShareDataSetKustoCluster_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_share_dataset_kusto_cluster", "test") + + resource.Test(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMDataShareDataSetDestroy("azurerm_data_share_dataset_kusto_cluster"), + Steps: []resource.TestStep{ + { + Config: testAccAzureRMDataShareDataSetKustoCluster_basic(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMDataShareDataSetExists(data.ResourceName), + resource.TestCheckResourceAttrSet(data.ResourceName, "display_name"), + resource.TestCheckResourceAttrSet(data.ResourceName, "kusto_cluster_location"), + ), + }, + data.ImportStep(), + }, + }) +} + +func TestAccAzureRMDataShareDataSetKustoCluster_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, 
"azurerm_data_share_dataset_kusto_cluster", "test") + + resource.Test(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMDataShareDataSetDestroy("azurerm_data_share_dataset_kusto_cluster"), + Steps: []resource.TestStep{ + { + Config: testAccAzureRMDataShareDataSetKustoCluster_basic(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMDataShareDataSetExists(data.ResourceName), + ), + }, + data.RequiresImportErrorStep(testAccAzureRMDataShareDataSetKustoCluster_requiresImport), + }, + }) +} + +func testAccAzureRMDataShareDataSetKustoCluster_template(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +provider "azuread" { +} + +resource "azurerm_resource_group" "test" { + name = "acctest-datashare-%[1]d" + location = "%[2]s" +} + +resource "azurerm_data_share_account" "test" { + name = "acctest-dsa-%[1]d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + identity { + type = "SystemAssigned" + } +} + +resource "azurerm_data_share" "test" { + name = "acctest_ds_%[1]d" + account_id = azurerm_data_share_account.test.id + kind = "InPlace" +} + +resource "azurerm_kusto_cluster" "test" { + name = "acctestkc%[3]d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + sku { + name = "Dev(No SLA)_Standard_D11_v2" + capacity = 1 + } +} + +data "azuread_service_principal" "test" { + display_name = azurerm_data_share_account.test.name +} + +resource "azurerm_role_assignment" "test" { + scope = azurerm_kusto_cluster.test.id + role_definition_name = "Contributor" + principal_id = data.azuread_service_principal.test.object_id +} +`, data.RandomInteger, data.Locations.Primary, data.RandomIntOfLength(12)) +} + +func testAccAzureRMDataShareDataSetKustoCluster_basic(data acceptance.TestData) string { + config := 
testAccAzureRMDataShareDataSetKustoCluster_template(data) + return fmt.Sprintf(` +%s + +resource "azurerm_data_share_dataset_kusto_cluster" "test" { + name = "acctest-dsbds-%d" + share_id = azurerm_data_share.test.id + kusto_cluster_id = azurerm_kusto_cluster.test.id + depends_on = [ + azurerm_role_assignment.test, + ] +} +`, config, data.RandomInteger) +} + +func testAccAzureRMDataShareDataSetKustoCluster_requiresImport(data acceptance.TestData) string { + config := testAccAzureRMDataShareDataSetKustoCluster_basic(data) + return fmt.Sprintf(` +%s + +resource "azurerm_data_share_dataset_kusto_cluster" "import" { + name = azurerm_data_share_dataset_kusto_cluster.test.name + share_id = azurerm_data_share.test.id + kusto_cluster_id = azurerm_kusto_cluster.test.id +} +`, config) +} diff --git a/azurerm/internal/services/datashare/tests/resource_arm_data_share_dataset_kusto_database_test.go b/azurerm/internal/services/datashare/tests/resource_arm_data_share_dataset_kusto_database_test.go new file mode 100644 index 000000000000..d2b2a7c9bd60 --- /dev/null +++ b/azurerm/internal/services/datashare/tests/resource_arm_data_share_dataset_kusto_database_test.go @@ -0,0 +1,137 @@ +package tests + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" +) + +func TestAccAzureRMDataShareDataSetKustoDatabase_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_share_dataset_kusto_database", "test") + + resource.Test(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMDataShareDataSetDestroy("azurerm_data_share_dataset_kusto_database"), + Steps: []resource.TestStep{ + { + Config: testAccAzureRMDataShareDataSetKustoDatabase_basic(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMDataShareDataSetExists(data.ResourceName), + 
resource.TestCheckResourceAttrSet(data.ResourceName, "display_name"), + resource.TestCheckResourceAttrSet(data.ResourceName, "kusto_cluster_location"), + ), + }, + data.ImportStep(), + }, + }) +} + +func TestAccAzureRMDataShareDataSetKustoDatabase_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_share_dataset_kusto_database", "test") + + resource.Test(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMDataShareDataSetDestroy("azurerm_data_share_dataset_kusto_database"), + Steps: []resource.TestStep{ + { + Config: testAccAzureRMDataShareDataSetKustoDatabase_basic(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMDataShareDataSetExists(data.ResourceName), + ), + }, + data.RequiresImportErrorStep(testAccAzureRMDataShareDataSetKustoDatabase_requiresImport), + }, + }) +} + +func testAccAzureRMDataShareDataSetKustoDatabase_template(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +provider "azuread" { +} + +resource "azurerm_resource_group" "test" { + name = "acctest-datashare-%[1]d" + location = "%[2]s" +} + +resource "azurerm_data_share_account" "test" { + name = "acctest-dsa-%[1]d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + identity { + type = "SystemAssigned" + } +} + +resource "azurerm_data_share" "test" { + name = "acctest_ds_%[1]d" + account_id = azurerm_data_share_account.test.id + kind = "InPlace" +} + +resource "azurerm_kusto_cluster" "test" { + name = "acctestkc%[3]d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + sku { + name = "Dev(No SLA)_Standard_D11_v2" + capacity = 1 + } +} + +resource "azurerm_kusto_database" "test" { + name = "acctestkd-%[1]d" + resource_group_name = azurerm_resource_group.test.name + location = 
azurerm_resource_group.test.location + cluster_name = azurerm_kusto_cluster.test.name +} + +data "azuread_service_principal" "test" { + display_name = azurerm_data_share_account.test.name +} + +resource "azurerm_role_assignment" "test" { + scope = azurerm_kusto_cluster.test.id + role_definition_name = "Contributor" + principal_id = data.azuread_service_principal.test.object_id +} +`, data.RandomInteger, data.Locations.Primary, data.RandomIntOfLength(12)) +} + +func testAccAzureRMDataShareDataSetKustoDatabase_basic(data acceptance.TestData) string { + config := testAccAzureRMDataShareDataSetKustoDatabase_template(data) + return fmt.Sprintf(` +%s + +resource "azurerm_data_share_dataset_kusto_database" "test" { + name = "acctest-dsbds-%d" + share_id = azurerm_data_share.test.id + kusto_database_id = azurerm_kusto_database.test.id + depends_on = [ + azurerm_role_assignment.test, + ] +} +`, config, data.RandomInteger) +} + +func testAccAzureRMDataShareDataSetKustoDatabase_requiresImport(data acceptance.TestData) string { + config := testAccAzureRMDataShareDataSetKustoDatabase_basic(data) + return fmt.Sprintf(` +%s + +resource "azurerm_data_share_dataset_kusto_database" "import" { + name = azurerm_data_share_dataset_kusto_database.test.name + share_id = azurerm_data_share.test.id + kusto_database_id = azurerm_kusto_database.test.id +} +`, config) +} diff --git a/azurerm/internal/services/datashare/tests/testdata/application_gateway_test.cer b/azurerm/internal/services/datashare/tests/testdata/application_gateway_test.cer new file mode 100644 index 000000000000..577947b220d7 --- /dev/null +++ b/azurerm/internal/services/datashare/tests/testdata/application_gateway_test.cer @@ -0,0 +1,21 @@ +-----BEGIN CERTIFICATE----- +MIIDbzCCAlegAwIBAgIJAIzjRD36sIbbMA0GCSqGSIb3DQEBCwUAME0xCzAJBgNV +BAYTAlVTMRMwEQYDVQQIDApTb21lLVN0YXRlMRIwEAYDVQQKDAl0ZXJyYWZvcm0x +FTATBgNVBAMMDHRlcnJhZm9ybS5pbzAgFw0xNzA0MjEyMDA1MjdaGA8yMTE3MDMy 
+ODIwMDUyN1owTTELMAkGA1UEBhMCVVMxEzARBgNVBAgMClNvbWUtU3RhdGUxEjAQ +BgNVBAoMCXRlcnJhZm9ybTEVMBMGA1UEAwwMdGVycmFmb3JtLmlvMIIBIjANBgkq +hkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA3L9L5szT4+FLykTFNyyPjy/k3BQTYAfR +QzP2dhnsuUKm3cdPC0NyZ+wEXIUGhoDO2YG6EYChOl8fsDqDOjloSUGKqYw++nlp +HIuUgJx8IxxG2XkALCjFU7EmF+w7kn76d0ezpEIYxnLP+KG2DVornoEt1aLhv1ML +mpgEZZPhDbMSLhSYWeTVRMayXLwqtfgnDumQSB+8d/1JuJqrSI4pD12JozVThzb6 +hsjfb6RMX4epPmrGn0PbTPEEA6awmsxBCXB0s13nNQt/O0hLM2agwvAyozilQV+s +616Ckgk6DJoUkqZhDy7vPYMIRSr98fBws6zkrV6tTLjmD8xAvobePQIDAQABo1Aw +TjAdBgNVHQ4EFgQUXIqO421zMMmbcRRX9wctZFCQuPIwHwYDVR0jBBgwFoAUXIqO +421zMMmbcRRX9wctZFCQuPIwDAYDVR0TBAUwAwEB/zANBgkqhkiG9w0BAQsFAAOC +AQEAr82NeT3BYJOKLlUL6Om5LjUF66ewcJjG9ltdvyQwVneMcq7t5UAPxgChzqNR +Vk4da8PzkXpjBJyWezHupdJNX3XqeUk2kSxqQ6/gmhqvfI3y7djrwoO6jvMEY26W +qtkTNORWDP3THJJVimC3zV+KMU5UBVrEzhOVhHSU709lBP75o0BBn3xGsPqSq9k8 +IotIFfyAc6a+XP3+ZMpvh7wqAUml7vWa5wlcXExCx39h1balfDSLGNC4swWPCp9A +MnQR0p+vMay9hNP1Eh+9QYUai14d5KS3cFV+KxE1cJR5HD/iLltnnOEbpMsB0eVO +ZWkFvE7Y5lW0oVSAfin5TwTJMQ== +-----END CERTIFICATE----- \ No newline at end of file diff --git a/azurerm/internal/services/datashare/validate/data_share.go b/azurerm/internal/services/datashare/validate/data_share.go index c1a393af2b1b..0176f55ce38b 100644 --- a/azurerm/internal/services/datashare/validate/data_share.go +++ b/azurerm/internal/services/datashare/validate/data_share.go @@ -40,3 +40,23 @@ func DataShareSyncName() schema.SchemaValidateFunc { regexp.MustCompile(`^[^&%#/]{1,90}$`), `Data share snapshot schedule name should have length of 1 - 90, and cannot contain &%#/`, ) } + +func DataShareID(i interface{}, k string) (warnings []string, errors []error) { + v, ok := i.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected type of %q to be string", k)) + return warnings, errors + } + + if _, err := parse.DataShareID(v); err != nil { + errors = append(errors, fmt.Errorf("can not parse %q as a data share id: %v", k, err)) + } + + return warnings, errors +} + +func 
DatashareDataSetName() schema.SchemaValidateFunc { + return validation.StringMatch( + regexp.MustCompile(`^[\w-]{2,90}$`), `Dataset name can only contain number, letters, - and _, and must be between 2 and 90 characters long.`, + ) +} diff --git a/website/azurerm.erb b/website/azurerm.erb index 972c812fe1e3..685765ef3cad 100644 --- a/website/azurerm.erb +++ b/website/azurerm.erb @@ -197,6 +197,26 @@ azurerm_data_share_account +
  • + azurerm_data_share_dataset_blob_storage +
  • + +
  • + azurerm_data_share_dataset_data_lake_gen1 +
  • + +
  • + azurerm_data_share_dataset_data_lake_gen2 +
  • + +
  • + azurerm_data_share_dataset_kusto_cluster +
  • + +
  • + azurerm_data_share_dataset_kusto_database +
  • +
  • azurerm_dedicated_host
  • @@ -1443,6 +1463,21 @@
  • azurerm_data_share_account
  • +
  • + azurerm_data_share_dataset_blob_storage +
  • +
  • + azurerm_data_share_dataset_data_lake_gen1 +
  • +
  • + azurerm_data_share_dataset_data_lake_gen2 +
  • +
  • + azurerm_data_share_dataset_kusto_cluster +
  • +
  • + azurerm_data_share_dataset_kusto_database +
  • diff --git a/website/docs/d/data_share_dataset_blob_storage.html.markdown b/website/docs/d/data_share_dataset_blob_storage.html.markdown new file mode 100644 index 000000000000..180ea9ad193c --- /dev/null +++ b/website/docs/d/data_share_dataset_blob_storage.html.markdown @@ -0,0 +1,62 @@ +--- +subcategory: "Data Share" +layout: "azurerm" +page_title: "Azure Resource Manager: Data Source: azurerm_data_share_dataset_blob_storage" +description: |- + Gets information about an existing Data Share Blob Storage Dataset. +--- + +# Data Source: azurerm_data_share_dataset_blob_storage + +Use this data source to access information about an existing Data Share Blob Storage Dataset. + +## Example Usage + +```hcl +provider "azurerm" { + features {} +} + +data "azurerm_data_share_dataset_blob_storage" "example" { + name = "example-dsbsds" + share_id = "example-share-id" +} + +output "id" { + value = data.azurerm_data_share_dataset_blob_storage.example.id +} +``` + +## Arguments Reference + +The following arguments are supported: + +* `name` - (Required) The name of this Data Share Blob Storage Dataset. + +* `share_id` - (Required) The ID of the Data Share in which this Data Share Blob Storage Dataset should be created. + +## Attributes Reference + +In addition to the Arguments listed above - the following Attributes are exported: + +* `id` - The ID of the Data Share Blob Storage Dataset. + +* `container_name` - The name of the storage account container to be shared with the receiver. + +* `storage_account_name` - The name of the storage account to be shared with the receiver. + +* `storage_account_resource_group_name` - The resource group name of the storage account to be shared with the receiver. + +* `storage_account_subscription_id` - The subscription id of the storage account to be shared with the receiver. + +* `file_path` - The path of the file in the storage container to be shared with the receiver. 
+ +* `folder_path` - The folder path of the file in the storage container to be shared with the receiver. + +* `display_name` - The name of the Data Share Dataset. + +## Timeouts + +The `timeouts` block allows you to specify [timeouts](https://www.terraform.io/docs/configuration/resources.html#timeouts) for certain actions: + +* `read` - (Defaults to 5 minutes) Used when retrieving the Data Share Blob Storage Dataset. diff --git a/website/docs/d/data_share_dataset_data_lake_gen1.html.markdown b/website/docs/d/data_share_dataset_data_lake_gen1.html.markdown new file mode 100644 index 000000000000..65931ad814fe --- /dev/null +++ b/website/docs/d/data_share_dataset_data_lake_gen1.html.markdown @@ -0,0 +1,60 @@ +--- +subcategory: "Data Share" +layout: "azurerm" +page_title: "Azure Resource Manager: Data Source: azurerm_data_share_dataset_data_lake_gen1" +description: |- + Gets information about an existing Data Share Data Lake Gen1 Dataset. +--- + +# Data Source: azurerm_data_share_dataset_data_lake_gen1 + +Use this data source to access information about an existing Data Share Data Lake Gen1 Dataset. + +## Example Usage + +```hcl +provider "azurerm" { + features {} +} + +data "azurerm_data_share_dataset_data_lake_gen1" "example" { + name = "example-dsdlg1ds" + share_id = "example-share-id" +} + +output "id" { + value = data.azurerm_data_share_dataset_data_lake_gen1.example.id +} +``` + +## Arguments Reference + +The following arguments are supported: + +* `name` - (Required) The name of this Data Share Data Lake Gen1 Dataset. + +* `share_id` - (Required) The ID of the Data Share in which this Data Share Data Lake Gen1 Dataset should be created. + +## Attributes Reference + +In addition to the Arguments listed above - the following Attributes are exported: + +* `id` - The ID of the Data Share Data Lake Gen1 Dataset. + +* `data_lake_store_name` - The name of the data lake store to be shared with the receiver. 
+ +* `data_lake_store_resource_group_name` - The resource group name of the data lake store to be shared with the receiver. + +* `data_lake_store_subscription_id` - The subscription id of the data lake store to be shared with the receiver. + +* `folder_path` - The folder path of the data lake store to be shared with the receiver. + +* `file_name` - The file name in the folder path of the data lake store to be shared with the receiver. + +* `display_name` - The name of the Data Share Dataset. + +## Timeouts + +The `timeouts` block allows you to specify [timeouts](https://www.terraform.io/docs/configuration/resources.html#timeouts) for certain actions: + +* `read` - (Defaults to 5 minutes) Used when retrieving the Data Share Data Lake Gen1 Dataset. diff --git a/website/docs/d/data_share_dataset_data_lake_gen2.html.markdown b/website/docs/d/data_share_dataset_data_lake_gen2.html.markdown new file mode 100644 index 000000000000..e5cdc11f651a --- /dev/null +++ b/website/docs/d/data_share_dataset_data_lake_gen2.html.markdown @@ -0,0 +1,62 @@ +--- +subcategory: "Data Share" +layout: "azurerm" +page_title: "Azure Resource Manager: Data Source: azurerm_data_share_dataset_data_lake_gen2" +description: |- + Gets information about an existing Data Share Data Lake Gen2 Dataset. +--- + +# Data Source: azurerm_data_share_dataset_data_lake_gen2 + +Use this data source to access information about an existing Data Share Data Lake Gen2 Dataset. + +## Example Usage + +```hcl +provider "azurerm" { + features {} +} + +data "azurerm_data_share_dataset_data_lake_gen2" "example" { + name = "example-dsdlg2ds" + share_id = "example-share-id" +} + +output "id" { + value = data.azurerm_data_share_dataset_data_lake_gen2.example.id +} +``` + +## Arguments Reference + +The following arguments are supported: + +* `name` - (Required) The name of this Data Share Data Lake Gen2 Dataset. 
+ +* `share_id` - (Required) The ID of the Data Share in which this Data Share Data Lake Gen2 Dataset should be created. + +## Attributes Reference + +In addition to the Arguments listed above - the following Attributes are exported: + +* `id` - The ID of the Data Share Data Lake Gen2 Dataset. + +* `storage_account_name` - The name of the storage account of the data lake file system to be shared with the receiver. + +* `storage_account_resource_group_name` - The resource group name of the storage account of the data lake file system to be shared with the receiver. + +* `storage_account_subscription_id` - The subscription id of the storage account of the data lake file system to be shared with the receiver. + +* `file_system_name` - The name of the data lake file system to be shared with the receiver. + +* `file_path` - The path of the file in the data lake file system to be shared with the receiver. + +* `folder_path` - The folder path in the data lake file system to be shared with the receiver. + +* `display_name` - The name of the Data Share Dataset. + +## Timeouts + +The `timeouts` block allows you to specify [timeouts](https://www.terraform.io/docs/configuration/resources.html#timeouts) for certain actions: + +* `read` - (Defaults to 5 minutes) Used when retrieving the Data Share Data Lake Gen2 Dataset. diff --git a/website/docs/d/data_share_dataset_kusto_cluster.html.markdown b/website/docs/d/data_share_dataset_kusto_cluster.html.markdown new file mode 100644 index 000000000000..abc474d14e45 --- /dev/null +++ b/website/docs/d/data_share_dataset_kusto_cluster.html.markdown @@ -0,0 +1,54 @@ +--- +subcategory: "Data Share" +layout: "azurerm" +page_title: "Azure Resource Manager: Data Source: azurerm_data_share_dataset_kusto_cluster" +description: |- + Gets information about an existing Data Share Kusto Cluster Dataset. 
+--- + +# Data Source: azurerm_data_share_dataset_kusto_cluster + +Use this data source to access information about an existing Data Share Kusto Cluster Dataset. + +## Example Usage + +```hcl +provider "azurerm" { + features {} +} + +data "azurerm_data_share_dataset_kusto_cluster" "example" { + name = "example-dskcds" + share_id = "example-share-id" +} + +output "id" { + value = data.azurerm_data_share_dataset_kusto_cluster.example.id +} +``` + +## Arguments Reference + +The following arguments are supported: + +* `name` - (Required) The name of this Data Share Kusto Cluster Dataset. + +* `share_id` - (Required) The ID of the Data Share in which this Data Share Kusto Cluster Dataset should be created. + +## Attributes Reference + +In addition to the Arguments listed above - the following Attributes are exported: + +* `id` - The ID of the Data Share Kusto Cluster Dataset. + +* `kusto_cluster_id` - The ID of the Kusto Cluster to be shared with the receiver. + +* `display_name` - The name of the Data Share Dataset. + +* `kusto_cluster_location` - The location of the Kusto Cluster. + +## Timeouts + +The `timeouts` block allows you to specify [timeouts](https://www.terraform.io/docs/configuration/resources.html#timeouts) for certain actions: + +* `read` - (Defaults to 5 minutes) Used when retrieving the Data Share Kusto Cluster Dataset. diff --git a/website/docs/d/data_share_dataset_kusto_database.html.markdown b/website/docs/d/data_share_dataset_kusto_database.html.markdown new file mode 100644 index 000000000000..3c293928b789 --- /dev/null +++ b/website/docs/d/data_share_dataset_kusto_database.html.markdown @@ -0,0 +1,54 @@ +--- +subcategory: "Data Share" +layout: "azurerm" +page_title: "Azure Resource Manager: Data Source: azurerm_data_share_dataset_kusto_database" +description: |- + Gets information about an existing Data Share Kusto Database Dataset. 
+--- + +# Data Source: azurerm_data_share_dataset_kusto_database + +Use this data source to access information about an existing Data Share Kusto Database Dataset. + +## Example Usage + +```hcl +provider "azurerm" { + features {} +} + +data "azurerm_data_share_dataset_kusto_database" "example" { + name = "example-dskdds" + share_id = "example-share-id" +} + +output "id" { + value = data.azurerm_data_share_dataset_kusto_database.example.id +} +``` + +## Arguments Reference + +The following arguments are supported: + +* `name` - (Required) The name of this Data Share Kusto Database Dataset. + +* `share_id` - (Required) The ID of the Data Share in which this Data Share Kusto Database Dataset should be created. + +## Attributes Reference + +In addition to the Arguments listed above - the following Attributes are exported: + +* `id` - The ID of the Data Share Kusto Database Dataset. + +* `kusto_database_id` - The ID of the Kusto Cluster Database to be shared with the receiver. + +* `display_name` - The name of the Data Share Dataset. + +* `kusto_cluster_location` - The location of the Kusto Cluster. + +## Timeouts + +The `timeouts` block allows you to specify [timeouts](https://www.terraform.io/docs/configuration/resources.html#timeouts) for certain actions: + +* `read` - (Defaults to 5 minutes) Used when retrieving the Data Share Kusto Database Dataset. diff --git a/website/docs/r/data_share_dataset_blob_storage.html.markdown b/website/docs/r/data_share_dataset_blob_storage.html.markdown new file mode 100644 index 000000000000..bbdd7b518f30 --- /dev/null +++ b/website/docs/r/data_share_dataset_blob_storage.html.markdown @@ -0,0 +1,120 @@ +--- +subcategory: "Data Share" +layout: "azurerm" +page_title: "Azure Resource Manager: azurerm_data_share_dataset_blob_storage" +description: |- + Manages a Data Share Blob Storage Dataset. +--- + +# azurerm_data_share_dataset_blob_storage + +Manages a Data Share Blob Storage Dataset. 
+ +## Example Usage + +```hcl +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "example" { + name = "example-resources" + location = "West Europe" +} + +resource "azurerm_data_share_account" "example" { + name = "example-dsa" + location = azurerm_resource_group.example.location + resource_group_name = azurerm_resource_group.example.name + identity { + type = "SystemAssigned" + } +} + +resource "azurerm_data_share" "example" { + name = "example_ds" + account_id = azurerm_data_share_account.example.id + kind = "CopyBased" +} + +resource "azurerm_storage_account" "example" { + name = "examplestr" + resource_group_name = azurerm_resource_group.example.name + location = azurerm_resource_group.example.location + account_tier = "Standard" + account_replication_type = "RAGRS" +} + +resource "azurerm_storage_container" "example" { + name = "example-sc" + storage_account_name = azurerm_storage_account.example.name + container_access_type = "container" +} + +data "azuread_service_principal" "example" { + display_name = azurerm_data_share_account.example.name +} + +resource "azurerm_role_assignment" "example" { + scope = azurerm_storage_account.example.id + role_definition_name = "Storage Blob Data Reader" + principal_id = data.azuread_service_principal.example.object_id +} + +resource "azurerm_data_share_dataset_blob_storage" "example" { + name = "example-dsbsds-file" + share_id = azurerm_data_share.example.id + container_name = azurerm_storage_container.example.name + storage_account_name = azurerm_storage_account.example.name + storage_account_resource_group_name = azurerm_storage_account.example.resource_group_name + storage_account_subscription_id = "00000000-0000-0000-0000-000000000000" + file_path = "myfile.txt" + depends_on = [ + azurerm_role_assignment.example, + ] +} +``` + +## Arguments Reference + +The following arguments are supported: + +* `name` - (Required) The name which should be used for this Data Share Blob Storage Dataset. 
Changing this forces a new Data Share Blob Storage Dataset to be created. + +* `share_id` - (Required) The ID of the Data Share in which this Data Share Blob Storage Dataset should be created. Changing this forces a new Data Share Blob Storage Dataset to be created. + +* `container_name` - (Required) The name of the storage account container to be shared with the receiver. Changing this forces a new Data Share Blob Storage Dataset to be created. + +* `storage_account_name` - (Required) The name of the storage account to be shared with the receiver. Changing this forces a new Data Share Blob Storage Dataset to be created. + +* `storage_account_resource_group_name` - (Required) The resource group name of the storage account to be shared with the receiver. Changing this forces a new Data Share Blob Storage Dataset to be created. + +* `storage_account_subscription_id` - (Required) The subscription id of the storage account to be shared with the receiver. Changing this forces a new Data Share Blob Storage Dataset to be created. + +* `file_path` - (Optional) The path of the file in the storage container to be shared with the receiver. Changing this forces a new Data Share Blob Storage Dataset to be created. + +* `folder_path` - (Optional) The path of the folder in the storage container to be shared with the receiver. Changing this forces a new Data Share Blob Storage Dataset to be created. + +## Attributes Reference + +In addition to the Arguments listed above - the following Attributes are exported: + +* `id` - The ID of the Data Share Blob Storage Dataset. + +* `display_name` - The name of the Data Share Dataset. + +## Timeouts + +The `timeouts` block allows you to specify [timeouts](https://www.terraform.io/docs/configuration/resources.html#timeouts) for certain actions: + +* `create` - (Defaults to 30 minutes) Used when creating the Data Share Blob Storage Dataset. +* `read` - (Defaults to 5 minutes) Used when retrieving the Data Share Blob Storage Dataset. 
+* `delete` - (Defaults to 30 minutes) Used when deleting the Data Share Blob Storage Dataset. + +## Import + +Data Share Blob Storage Datasets can be imported using the `resource id`, e.g. + +```shell +terraform import azurerm_data_share_dataset_blob_storage.example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.DataShare/accounts/account1/shares/share1/dataSets/dataSet1 +``` diff --git a/website/docs/r/data_share_dataset_data_lake_gen1.html.markdown b/website/docs/r/data_share_dataset_data_lake_gen1.html.markdown new file mode 100644 index 000000000000..2db8e35e96ae --- /dev/null +++ b/website/docs/r/data_share_dataset_data_lake_gen1.html.markdown @@ -0,0 +1,119 @@ +--- +subcategory: "Data Share" +layout: "azurerm" +page_title: "Azure Resource Manager: azurerm_data_share_dataset_data_lake_gen1" +description: |- + Manages a Data Share Data Lake Gen1 Dataset. +--- + +# azurerm_data_share_dataset_data_lake_gen1 + +Manages a Data Share Data Lake Gen1 Dataset. 
+ +## Example Usage + +```hcl +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "example" { + name = "example-resources" + location = "West Europe" +} + +resource "azurerm_data_share_account" "example" { + name = "example-dsa" + location = azurerm_resource_group.example.location + resource_group_name = azurerm_resource_group.example.name + identity { + type = "SystemAssigned" + } +} + +resource "azurerm_data_share" "example" { + name = "example_ds" + account_id = azurerm_data_share_account.example.id + kind = "CopyBased" +} + +resource "azurerm_data_lake_store" "example" { + name = "exampledls" + resource_group_name = azurerm_resource_group.example.name + location = azurerm_resource_group.example.location + firewall_state = "Disabled" +} + +resource "azurerm_data_lake_store_file" "example" { + account_name = azurerm_data_lake_store.example.name + local_file_path = "./example/myfile.txt" + remote_file_path = "/example/myfile.txt" +} + +data "azuread_service_principal" "example" { + display_name = azurerm_data_share_account.example.name +} + +resource "azurerm_role_assignment" "example" { + scope = azurerm_data_lake_store.example.id + role_definition_name = "Owner" + principal_id = data.azuread_service_principal.example.object_id +} + +resource "azurerm_data_share_dataset_data_lake_gen1" "example" { + name = "example-dlg1ds" + share_id = azurerm_data_share.example.id + data_lake_store_name = azurerm_data_lake_store.example.name + data_lake_store_resource_group_name = azurerm_data_lake_store.example.resource_group_name + data_lake_store_subscription_id = "00000000-0000-0000-0000-000000000000" + file_name = "myfile.txt" + folder_path = "/example/" + depends_on = [ + azurerm_role_assignment.example, + ] +} +``` + +## Arguments Reference + +The following arguments are supported: + +* `name` - (Required) The name which should be used for this Data Share Data Lake Gen1 Dataset. 
Changing this forces a new Data Share Data Lake Gen1 Dataset to be created. + +* `share_id` - (Required) The ID of the Data Share in which this Data Share Data Lake Gen1 Dataset should be created. Changing this forces a new Data Share Data Lake Gen1 Dataset to be created. + +* `data_lake_store_name` - (Required) The name of the data lake store to be shared with the receiver. Changing this forces a new Data Share Data Lake Gen1 Dataset to be created. + +* `data_lake_store_resource_group_name` - (Required) The resource group name of the data lake store to be shared with the receiver. Changing this forces a new Data Share Data Lake Gen1 Dataset to be created. + +* `data_lake_store_subscription_id` - (Required) The subscription id of the data lake store to be shared with the receiver. Changing this forces a new Data Share Data Lake Gen1 Dataset to be created. + +* `folder_path` - (Required) The folder path of the data lake store to be shared with the receiver. Changing this forces a new Data Share Data Lake Gen1 Dataset to be created. + +--- + +* `file_name` - (Optional) The file name in the folder path of the data lake store to be shared with the receiver. Changing this forces a new Data Share Data Lake Gen1 Dataset to be created. + +## Attributes Reference + +In addition to the Arguments listed above - the following Attributes are exported: + +* `id` - The ID of the Data Share Data Lake Gen1 Dataset. + +* `display_name` - The name of the Data Share Dataset. + +## Timeouts + +The `timeouts` block allows you to specify [timeouts](https://www.terraform.io/docs/configuration/resources.html#timeouts) for certain actions: + +* `create` - (Defaults to 30 minutes) Used when creating the Data Share Data Lake Gen1 Dataset. +* `read` - (Defaults to 5 minutes) Used when retrieving the Data Share Data Lake Gen1 Dataset. +* `delete` - (Defaults to 30 minutes) Used when deleting the Data Share Data Lake Gen1 Dataset. 
+ +## Import + +Data Share Data Lake Gen1 Datasets can be imported using the `resource id`, e.g. + +```shell +terraform import azurerm_data_share_dataset_data_lake_gen1.example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.DataShare/accounts/account1/shares/share1/dataSets/dataSet1 +``` diff --git a/website/docs/r/data_share_dataset_data_lake_gen2.html.markdown b/website/docs/r/data_share_dataset_data_lake_gen2.html.markdown new file mode 100644 index 000000000000..ddb31df177d7 --- /dev/null +++ b/website/docs/r/data_share_dataset_data_lake_gen2.html.markdown @@ -0,0 +1,122 @@ +--- +subcategory: "Data Share" +layout: "azurerm" +page_title: "Azure Resource Manager: azurerm_data_share_dataset_data_lake_gen2" +description: |- + Manages a Data Share Data Lake Gen2 Dataset. +--- + +# azurerm_data_share_dataset_data_lake_gen2 + +Manages a Data Share Data Lake Gen2 Dataset. + +## Example Usage + +```hcl +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "example" { + name = "example-resources" + location = "West Europe" +} + +resource "azurerm_data_share_account" "example" { + name = "example-dsa" + location = azurerm_resource_group.example.location + resource_group_name = azurerm_resource_group.example.name + identity { + type = "SystemAssigned" + } +} + +resource "azurerm_data_share" "example" { + name = "example_ds" + account_id = azurerm_data_share_account.example.id + kind = "CopyBased" +} + +resource "azurerm_storage_account" "example" { + name = "examplestr" + resource_group_name = azurerm_resource_group.example.name + location = azurerm_resource_group.example.location + account_kind = "BlobStorage" + account_tier = "Standard" + account_replication_type = "LRS" +} + +resource "azurerm_storage_data_lake_gen2_filesystem" "example" { + name = "example-dlg2fs" + storage_account_id = azurerm_storage_account.example.id +} + +data "azuread_service_principal" "example" { + display_name = 
azurerm_data_share_account.example.name +} + +resource "azurerm_role_assignment" "example" { + scope = azurerm_storage_account.example.id + role_definition_name = "Storage Blob Data Reader" + principal_id = data.azuread_service_principal.example.object_id +} + +resource "azurerm_data_share_dataset_data_lake_gen2" "example" { + name = "accexample-dlg2ds" + share_id = azurerm_data_share.example.id + storage_account_name = azurerm_storage_account.example.name + storage_account_resource_group_name = azurerm_storage_account.example.resource_group_name + storage_account_subscription_id = "00000000-0000-0000-0000-000000000000" + file_system_name = azurerm_storage_data_lake_gen2_filesystem.example.name + file_path = "myfile.txt" + depends_on = [ + azurerm_role_assignment.example, + ] +} +``` + +## Arguments Reference + +The following arguments are supported: + +* `name` - (Required) The name which should be used for this Data Share Data Lake Gen2 Dataset. Changing this forces a new Data Share Data Lake Gen2 Dataset to be created. + +* `share_id` - (Required) The ID of the Data Share in which this Data Share Data Lake Gen2 Dataset should be created. Changing this forces a new Data Share Data Lake Gen2 Dataset to be created. + +* `storage_account_name` - (Required) The name of the storage account of the data lake file system to be shared with the receiver. Changing this forces a new Data Share Data Lake Gen2 Dataset to be created. + +* `storage_account_resource_group_name` - (Required) The resource group name of the storage account of the data lake file system to be shared with the receiver. Changing this forces a new Data Share Data Lake Gen2 Dataset to be created. + +* `storage_account_subscription_id` - (Required) The subscription id of the storage account of the data lake file system to be shared with the receiver. Changing this forces a new Data Share Data Lake Gen2 Dataset to be created. 
+
+* `file_system_name` - (Required) The name of the data lake file system to be shared with the receiver. Changing this forces a new Data Share Data Lake Gen2 Dataset to be created.
+
+---
+
+* `file_path` - (Optional) The path of the file in the data lake file system to be shared with the receiver. Conflicts with `folder_path`. Changing this forces a new Data Share Data Lake Gen2 Dataset to be created.
+
+* `folder_path` - (Optional) The folder path in the data lake file system to be shared with the receiver. Conflicts with `file_path`. Changing this forces a new Data Share Data Lake Gen2 Dataset to be created.
+
+## Attributes Reference
+
+In addition to the Arguments listed above - the following Attributes are exported:
+
+* `id` - The ID of the Data Share Data Lake Gen2 Dataset.
+
+* `display_name` - The name of the Data Share Dataset.
+
+## Timeouts
+
+The `timeouts` block allows you to specify [timeouts](https://www.terraform.io/docs/configuration/resources.html#timeouts) for certain actions:
+
+* `create` - (Defaults to 30 minutes) Used when creating the Data Share Data Lake Gen2 Dataset.
+* `read` - (Defaults to 5 minutes) Used when retrieving the Data Share Data Lake Gen2 Dataset.
+* `delete` - (Defaults to 30 minutes) Used when deleting the Data Share Data Lake Gen2 Dataset.
+
+## Import
+
+Data Share Data Lake Gen2 Datasets can be imported using the `resource id`, e.g. 
+ +```shell +terraform import azurerm_data_share_dataset_data_lake_gen2.example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.DataShare/accounts/account1/shares/share1/dataSets/dataSet1 +``` diff --git a/website/docs/r/data_share_dataset_kusto_cluster.html.markdown b/website/docs/r/data_share_dataset_kusto_cluster.html.markdown new file mode 100644 index 000000000000..6d5ad9f61176 --- /dev/null +++ b/website/docs/r/data_share_dataset_kusto_cluster.html.markdown @@ -0,0 +1,105 @@ +--- +subcategory: "Data Share" +layout: "azurerm" +page_title: "Azure Resource Manager: azurerm_data_share_dataset_kusto_cluster" +description: |- + Manages a Data Share Kusto Cluster Dataset. +--- + +# azurerm_data_share_dataset_kusto_cluster + +Manages a Data Share Kusto Cluster Dataset. + +## Example Usage + +```hcl +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "example" { + name = "example-resources" + location = "West Europe" +} + +resource "azurerm_data_share_account" "example" { + name = "example-dsa" + location = azurerm_resource_group.example.location + resource_group_name = azurerm_resource_group.example.name + identity { + type = "SystemAssigned" + } +} + +resource "azurerm_data_share" "example" { + name = "example_ds" + account_id = azurerm_data_share_account.example.id + kind = "InPlace" +} + +resource "azurerm_kusto_cluster" "example" { + name = "examplekc" + location = azurerm_resource_group.example.location + resource_group_name = azurerm_resource_group.example.name + + sku { + name = "Dev(No SLA)_Standard_D11_v2" + capacity = 1 + } +} + +data "azuread_service_principal" "example" { + display_name = azurerm_data_share_account.example.name +} + +resource "azurerm_role_assignment" "example" { + scope = azurerm_kusto_cluster.example.id + role_definition_name = "Contributor" + principal_id = data.azuread_service_principal.example.object_id +} + +resource "azurerm_data_share_dataset_kusto_cluster" 
"example" { + name = "example-dsbkcds" + share_id = azurerm_data_share.example.id + kusto_cluster_id = azurerm_kusto_cluster.example.id + depends_on = [ + azurerm_role_assignment.example, + ] +} +``` + +## Arguments Reference + +The following arguments are supported: + +* `name` - (Required) The name which should be used for this Data Share Kusto Cluster Dataset. Changing this forces a new Data Share Kusto Cluster Dataset to be created. + +* `share_id` - (Required) The ID of the Data Share in which this Data Share Kusto Cluster Dataset should be created. Changing this forces a new Data Share Kusto Cluster Dataset to be created. + +* `kusto_cluster_id` - (Required) The ID of the Kusto Cluster to be shared with the receiver. Changing this forces a new Data Share Kusto Cluster Dataset to be created. + +## Attributes Reference + +In addition to the Arguments listed above - the following Attributes are exported: + +* `id` - The ID of the Data Share Kusto Cluster Dataset. + +* `display_name` - The name of the Data Share Dataset. + +* `kusto_cluster_location` - The location of the Kusto Cluster. + +## Timeouts + +The `timeouts` block allows you to specify [timeouts](https://www.terraform.io/docs/configuration/resources.html#timeouts) for certain actions: + +* `create` - (Defaults to 30 minutes) Used when creating the Data Share Kusto Cluster Dataset. +* `read` - (Defaults to 5 minutes) Used when retrieving the Data Share Kusto Cluster Dataset. +* `delete` - (Defaults to 30 minutes) Used when deleting the Data Share Kusto Cluster Dataset. + +## Import + +Data Share Kusto Cluster Datasets can be imported using the `resource id`, e.g. 
+ +```shell +terraform import azurerm_data_share_dataset_kusto_cluster.example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.DataShare/accounts/account1/shares/share1/dataSets/dataSet1 +``` diff --git a/website/docs/r/data_share_dataset_kusto_database.html.markdown b/website/docs/r/data_share_dataset_kusto_database.html.markdown new file mode 100644 index 000000000000..d7144859a6da --- /dev/null +++ b/website/docs/r/data_share_dataset_kusto_database.html.markdown @@ -0,0 +1,112 @@ +--- +subcategory: "Data Share" +layout: "azurerm" +page_title: "Azure Resource Manager: azurerm_data_share_dataset_kusto_database" +description: |- + Manages a Data Share Kusto Database Dataset. +--- + +# azurerm_data_share_dataset_kusto_database + +Manages a Data Share Kusto Database Dataset. + +## Example Usage + +```hcl +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "example" { + name = "example-resources" + location = "West Europe" +} + +resource "azurerm_data_share_account" "example" { + name = "example-dsa" + location = azurerm_resource_group.example.location + resource_group_name = azurerm_resource_group.example.name + identity { + type = "SystemAssigned" + } +} + +resource "azurerm_data_share" "example" { + name = "example_ds" + account_id = azurerm_data_share_account.example.id + kind = "InPlace" +} + +resource "azurerm_kusto_cluster" "example" { + name = "examplekc" + location = azurerm_resource_group.example.location + resource_group_name = azurerm_resource_group.example.name + + sku { + name = "Dev(No SLA)_Standard_D11_v2" + capacity = 1 + } +} + +resource "azurerm_kusto_database" "example" { + name = "examplekd" + resource_group_name = azurerm_resource_group.example.name + location = azurerm_resource_group.example.location + cluster_name = azurerm_kusto_cluster.example.name +} + +data "azuread_service_principal" "example" { + display_name = azurerm_data_share_account.example.name +} + +resource 
"azurerm_role_assignment" "example" { + scope = azurerm_kusto_cluster.example.id + role_definition_name = "Contributor" + principal_id = data.azuread_service_principal.example.object_id +} + +resource "azurerm_data_share_dataset_kusto_database" "example" { + name = "example-dsbds" + share_id = azurerm_data_share.example.id + kusto_database_id = azurerm_kusto_database.example.id + depends_on = [ + azurerm_role_assignment.example, + ] +} +``` + +## Arguments Reference + +The following arguments are supported: + +* `name` - (Required) The name which should be used for this Data Share Kusto Database Dataset. Changing this forces a new Data Share Kusto Database Dataset to be created. + +* `share_id` - (Required) The ID of the Data Share in which this Data Share Kusto Database Dataset should be created. Changing this forces a new Data Share Kusto Database Dataset to be created. + +* `kusto_database_id` - (Required) The ID of the Kusto Cluster Database to be shared with the receiver. Changing this forces a new Data Share Kusto Database Dataset to be created. + +## Attributes Reference + +In addition to the Arguments listed above - the following Attributes are exported: + +* `id` - The ID of the Data Share Kusto Database Dataset. + +* `display_name` - The name of the Data Share Dataset. + +* `kusto_cluster_location` - The location of the Kusto Cluster. + +## Timeouts + +The `timeouts` block allows you to specify [timeouts](https://www.terraform.io/docs/configuration/resources.html#timeouts) for certain actions: + +* `create` - (Defaults to 30 minutes) Used when creating the Data Share Kusto Database Dataset. +* `read` - (Defaults to 5 minutes) Used when retrieving the Data Share Kusto Database Dataset. +* `delete` - (Defaults to 30 minutes) Used when deleting the Data Share Kusto Database Dataset. + +## Import + +Data Share Kusto Database Datasets can be imported using the `resource id`, e.g. 
+ +```shell +terraform import azurerm_data_share_dataset_kusto_database.example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.DataShare/accounts/account1/shares/share1/dataSets/dataSet1 +```