diff --git a/azurerm/internal/services/datafactory/data_factory_dataset_delimited_text_resource.go b/azurerm/internal/services/datafactory/data_factory_dataset_delimited_text_resource.go index 0e290918807c..d90bc1f855c5 100644 --- a/azurerm/internal/services/datafactory/data_factory_dataset_delimited_text_resource.go +++ b/azurerm/internal/services/datafactory/data_factory_dataset_delimited_text_resource.go @@ -64,7 +64,7 @@ func resourceArmDataFactoryDatasetDelimitedText() *schema.Resource { Type: schema.TypeList, MaxItems: 1, Optional: true, - //ConflictsWith: []string{"sftp_server_location", "file_server_location", "s3_location", "blob_storage_location"}, + //ConflictsWith: []string{"sftp_server_location", "file_server_location", "s3_location", "azure_blob_storage_location"}, ConflictsWith: []string{"azure_blob_storage_location"}, Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ @@ -92,7 +92,7 @@ func resourceArmDataFactoryDatasetDelimitedText() *schema.Resource { Type: schema.TypeList, MaxItems: 1, Optional: true, - //ConflictsWith: []string{"sftp_server_location", "file_server_location", "s3_location", "blob_storage_location"}, + //ConflictsWith: []string{"sftp_server_location", "file_server_location", "s3_location", "azure_blob_storage_location"}, ConflictsWith: []string{"http_server_location"}, Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ @@ -266,7 +266,7 @@ func resourceArmDataFactoryDatasetDelimitedTextCreateUpdate(d *schema.ResourceDa location := expandDataFactoryDatasetLocation(d) if location == nil { - return fmt.Errorf("One of `http_server_location`, `blob_storage_location` must be specified to create a DataFactory Delimited Text Dataset") + return fmt.Errorf("One of `http_server_location`, `azure_blob_storage_location` must be specified to create a DataFactory Delimited Text Dataset") } delimited_textDatasetProperties := datafactory.DelimitedTextDatasetTypeProperties{ @@ -404,8 +404,8 @@ func 
resourceArmDataFactoryDatasetDelimitedTextRead(d *schema.ResourceData, meta } } if azureBlobStorageLocation, ok := properties.Location.AsAzureBlobStorageLocation(); ok { - if err := d.Set("http_server_location", flattenDataFactoryDatasetAzureBlobStorageLocation(azureBlobStorageLocation)); err != nil { - return fmt.Errorf("Error setting `http_server_location` for Data Factory Delimited Text Dataset %s", err) + if err := d.Set("azure_blob_storage_location", flattenDataFactoryDatasetAzureBlobStorageLocation(azureBlobStorageLocation)); err != nil { + return fmt.Errorf("Error setting `azure_blob_storage_location` for Data Factory Delimited Text Dataset %s", err) } } @@ -499,7 +499,7 @@ func expandDataFactoryDatasetLocation(d *schema.ResourceData) datafactory.BasicD return expandDataFactoryDatasetHttpServerLocation(d) } - if _, ok := d.GetOk("blob_storage_location"); ok { + if _, ok := d.GetOk("azure_blob_storage_location"); ok { return expandDataFactoryDatasetAzureBlobStorageLocation(d) } @@ -520,7 +520,7 @@ func expandDataFactoryDatasetHttpServerLocation(d *schema.ResourceData) datafact return httpServerLocation } func expandDataFactoryDatasetAzureBlobStorageLocation(d *schema.ResourceData) datafactory.BasicDatasetLocation { - props := d.Get("blob_storage_location").([]interface{})[0].(map[string]interface{}) + props := d.Get("azure_blob_storage_location").([]interface{})[0].(map[string]interface{}) container := props["container"].(string) path := props["path"].(string) filename := props["filename"].(string) diff --git a/azurerm/internal/services/datafactory/data_factory_dataset_delimited_text_resource_test.go b/azurerm/internal/services/datafactory/data_factory_dataset_delimited_text_resource_test.go index d53b073c3e05..76d1bc05b94d 100644 --- a/azurerm/internal/services/datafactory/data_factory_dataset_delimited_text_resource_test.go +++ b/azurerm/internal/services/datafactory/data_factory_dataset_delimited_text_resource_test.go @@ -12,61 +12,6 @@ import ( 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func TestAccAzureRMDataFactoryDatasetDelimitedText_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_data_factory_dataset_delimited_text", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDataFactoryDatasetDelimitedTextDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDataFactoryDatasetDelimitedText_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDataFactoryDatasetDelimitedTextExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMDataFactoryDatasetDelimitedText_update(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_data_factory_dataset_delimited_text", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDataFactoryDatasetDelimitedTextDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDataFactoryDatasetDelimitedText_update1(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDataFactoryDatasetDelimitedTextExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "parameters.%", "2"), - resource.TestCheckResourceAttr(data.ResourceName, "annotations.#", "3"), - resource.TestCheckResourceAttr(data.ResourceName, "schema_column.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "additional_properties.%", "2"), - resource.TestCheckResourceAttr(data.ResourceName, "description", "test description"), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMDataFactoryDatasetDelimitedText_update2(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDataFactoryDatasetDelimitedTextExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, 
"parameters.%", "3"), - resource.TestCheckResourceAttr(data.ResourceName, "annotations.#", "2"), - resource.TestCheckResourceAttr(data.ResourceName, "schema_column.#", "2"), - resource.TestCheckResourceAttr(data.ResourceName, "additional_properties.%", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "description", "test description 2"), - ), - }, - data.ImportStep(), - }, - }) -} - func testCheckAzureRMDataFactoryDatasetDelimitedTextExists(name string) resource.TestCheckFunc { return func(s *terraform.State) error { client := acceptance.AzureProvider.Meta().(*clients.Client).DataFactory.DatasetClient @@ -125,7 +70,62 @@ func testCheckAzureRMDataFactoryDatasetDelimitedTextDestroy(s *terraform.State) return nil } -func testAccAzureRMDataFactoryDatasetDelimitedText_basic(data acceptance.TestData) string { +func TestAccAzureRMDataFactoryDatasetDelimitedText_http(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_factory_dataset_delimited_text", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMDataFactoryDatasetDelimitedTextDestroy, + Steps: []resource.TestStep{ + { + Config: testAccAzureRMDataFactoryDatasetDelimitedText_http(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMDataFactoryDatasetDelimitedTextExists(data.ResourceName), + ), + }, + data.ImportStep(), + }, + }) +} + +func TestAccAzureRMDataFactoryDatasetDelimitedText_http_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_factory_dataset_delimited_text", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMDataFactoryDatasetDelimitedTextDestroy, + Steps: []resource.TestStep{ + { + Config: testAccAzureRMDataFactoryDatasetDelimitedText_http_update1(data), + Check: resource.ComposeTestCheckFunc( 
+ testCheckAzureRMDataFactoryDatasetDelimitedTextExists(data.ResourceName), + resource.TestCheckResourceAttr(data.ResourceName, "parameters.%", "2"), + resource.TestCheckResourceAttr(data.ResourceName, "annotations.#", "3"), + resource.TestCheckResourceAttr(data.ResourceName, "schema_column.#", "1"), + resource.TestCheckResourceAttr(data.ResourceName, "additional_properties.%", "2"), + resource.TestCheckResourceAttr(data.ResourceName, "description", "test description"), + ), + }, + data.ImportStep(), + { + Config: testAccAzureRMDataFactoryDatasetDelimitedText_http_update2(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMDataFactoryDatasetDelimitedTextExists(data.ResourceName), + resource.TestCheckResourceAttr(data.ResourceName, "parameters.%", "3"), + resource.TestCheckResourceAttr(data.ResourceName, "annotations.#", "2"), + resource.TestCheckResourceAttr(data.ResourceName, "schema_column.#", "2"), + resource.TestCheckResourceAttr(data.ResourceName, "additional_properties.%", "1"), + resource.TestCheckResourceAttr(data.ResourceName, "description", "test description 2"), + ), + }, + data.ImportStep(), + }, + }) +} + +func testAccAzureRMDataFactoryDatasetDelimitedText_http(data acceptance.TestData) string { return fmt.Sprintf(` provider "azurerm" { features {} @@ -174,7 +174,7 @@ resource "azurerm_data_factory_dataset_delimited_text" "test" { `, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) } -func testAccAzureRMDataFactoryDatasetDelimitedText_update1(data acceptance.TestData) string { +func testAccAzureRMDataFactoryDatasetDelimitedText_http_update1(data acceptance.TestData) string { return fmt.Sprintf(` provider "azurerm" { features {} @@ -243,7 +243,7 @@ resource "azurerm_data_factory_dataset_delimited_text" "test" { `, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) } -func testAccAzureRMDataFactoryDatasetDelimitedText_update2(data 
acceptance.TestData) string { +func testAccAzureRMDataFactoryDatasetDelimitedText_http_update2(data acceptance.TestData) string { return fmt.Sprintf(` provider "azurerm" { features {} @@ -317,3 +317,85 @@ resource "azurerm_data_factory_dataset_delimited_text" "test" { } `, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) } + +func TestAccAzureRMDataFactoryDatasetDelimitedText_blob(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_factory_dataset_delimited_text", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMDataFactoryDatasetDelimitedTextDestroy, + Steps: []resource.TestStep{ + { + Config: testAccAzureRMDataFactoryDatasetDelimitedText_blob(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMDataFactoryDatasetDelimitedTextExists(data.ResourceName), + ), + }, + data.ImportStep(), + }, + }) +} + +func testAccAzureRMDataFactoryDatasetDelimitedText_blob(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-df-%d" + location = "%s" +} + +resource "azurerm_storage_account" "test" { + name = "acctestdf%s" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + account_tier = "Standard" + account_replication_type = "GRS" +} + +resource "azurerm_storage_container" "test" { + name = "content" + storage_account_name = azurerm_storage_account.test.name + container_access_type = "private" +} + +resource "azurerm_data_factory" "test" { + name = "acctestdf%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + + +resource "azurerm_data_factory_linked_service_azure_blob_storage" "test" { + name = "acctestlsblob%d" + resource_group_name = 
azurerm_resource_group.test.name + data_factory_name = azurerm_data_factory.test.name + connection_string = azurerm_storage_account.test.primary_connection_string +} + +resource "azurerm_data_factory_dataset_delimited_text" "test" { + name = "acctestds%d" + resource_group_name = azurerm_resource_group.test.name + data_factory_name = azurerm_data_factory.test.name + linked_service_name = azurerm_data_factory_linked_service_azure_blob_storage.test.name + + azure_blob_storage_location { + container = azurerm_storage_container.test.name + path = "foo/bar/" + filename = "foo.txt" + } + + column_delimiter = "," + row_delimiter = "NEW" + encoding = "UTF-8" + quote_character = "x" + escape_character = "f" + first_row_as_header = true + null_value = "NULL" + +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} diff --git a/azurerm/internal/services/datafactory/data_factory_dataset_json_resource.go b/azurerm/internal/services/datafactory/data_factory_dataset_json_resource.go index 77db338941e9..ce56aa21a922 100644 --- a/azurerm/internal/services/datafactory/data_factory_dataset_json_resource.go +++ b/azurerm/internal/services/datafactory/data_factory_dataset_json_resource.go @@ -64,7 +64,7 @@ func resourceArmDataFactoryDatasetJSON() *schema.Resource { Type: schema.TypeList, MaxItems: 1, Optional: true, - //ConflictsWith: []string{"sftp_server_location", "file_server_location", "s3_location", "blob_storage_location"}, + //ConflictsWith: []string{"sftp_server_location", "file_server_location", "s3_location", "azure_blob_storage_location"}, ConflictsWith: []string{"azure_blob_storage_location"}, Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ @@ -92,7 +92,7 @@ func resourceArmDataFactoryDatasetJSON() *schema.Resource { Type: schema.TypeList, MaxItems: 1, Optional: true, - //ConflictsWith: []string{"sftp_server_location", "file_server_location", "s3_location", "blob_storage_location"}, + 
//ConflictsWith: []string{"sftp_server_location", "file_server_location", "s3_location", "azure_blob_storage_location"}, ConflictsWith: []string{"http_server_location"}, Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ @@ -225,7 +225,7 @@ func resourceArmDataFactoryDatasetJSONCreateUpdate(d *schema.ResourceData, meta location := expandDataFactoryDatasetLocation(d) if location == nil { - return fmt.Errorf("One of `http_server_location`, `blob_storage_location` must be specified to create a DataFactory Delimited Text Dataset") + return fmt.Errorf("One of `http_server_location`, `azure_blob_storage_location` must be specified to create a DataFactory Delimited Text Dataset") } jsonDatasetProperties := datafactory.JSONDatasetTypeProperties{ @@ -357,8 +357,8 @@ func resourceArmDataFactoryDatasetJSONRead(d *schema.ResourceData, meta interfac } } if azureBlobStorageLocation, ok := properties.Location.AsAzureBlobStorageLocation(); ok { - if err := d.Set("http_server_location", flattenDataFactoryDatasetAzureBlobStorageLocation(azureBlobStorageLocation)); err != nil { - return fmt.Errorf("Error setting `http_server_location` for Data Factory Delimited Text Dataset %s", err) + if err := d.Set("azure_blob_storage_location", flattenDataFactoryDatasetAzureBlobStorageLocation(azureBlobStorageLocation)); err != nil { + return fmt.Errorf("Error setting `azure_blob_storage_location` for Data Factory Delimited Text Dataset %s", err) } } diff --git a/azurerm/internal/services/datafactory/data_factory_dataset_json_resource_test.go b/azurerm/internal/services/datafactory/data_factory_dataset_json_resource_test.go index db137a43128d..69c3cd0bc319 100644 --- a/azurerm/internal/services/datafactory/data_factory_dataset_json_resource_test.go +++ b/azurerm/internal/services/datafactory/data_factory_dataset_json_resource_test.go @@ -296,3 +296,79 @@ resource "azurerm_data_factory_dataset_json" "test" { } `, data.RandomInteger, data.Locations.Primary, data.RandomInteger, 
data.RandomInteger, data.RandomInteger) } + +func TestAccAzureRMDataFactoryDatasetJSON_blob(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_factory_dataset_json", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMDataFactoryDatasetJSONDestroy, + Steps: []resource.TestStep{ + { + Config: testAccAzureRMDataFactoryDatasetJSON_blob(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMDataFactoryDatasetJSONExists(data.ResourceName), + ), + }, + data.ImportStep(), + }, + }) +} + +func testAccAzureRMDataFactoryDatasetJSON_blob(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-df-%d" + location = "%s" +} + +resource "azurerm_storage_account" "test" { + name = "acctestdf%s" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + account_tier = "Standard" + account_replication_type = "GRS" +} + +resource "azurerm_storage_container" "test" { + name = "content" + storage_account_name = azurerm_storage_account.test.name + container_access_type = "private" +} + +resource "azurerm_data_factory" "test" { + name = "acctestdf%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + + +resource "azurerm_data_factory_linked_service_azure_blob_storage" "test" { + name = "acctestlsblob%d" + resource_group_name = azurerm_resource_group.test.name + data_factory_name = azurerm_data_factory.test.name + connection_string = azurerm_storage_account.test.primary_connection_string +} + +resource "azurerm_data_factory_dataset_json" "test" { + name = "acctestds%d" + resource_group_name = azurerm_resource_group.test.name + data_factory_name = azurerm_data_factory.test.name + linked_service_name = 
azurerm_data_factory_linked_service_azure_blob_storage.test.name + + azure_blob_storage_location { + container = azurerm_storage_container.test.name + path = "foo/bar/" + filename = "foo.txt" + } + + encoding = "UTF-8" + +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} diff --git a/website/docs/r/data_factory_dataset_json.html.markdown b/website/docs/r/data_factory_dataset_json.html.markdown index 16656e934bfc..20054a9df8d0 100644 --- a/website/docs/r/data_factory_dataset_json.html.markdown +++ b/website/docs/r/data_factory_dataset_json.html.markdown @@ -77,6 +77,10 @@ The following supported arguments are specific to JSON Dataset: * `http_server_location` - (Required) A `http_server_location` block as defined below. +* `azure_blob_storage_location` - (Required) An `azure_blob_storage_location` block as defined below. + +Exactly one of `http_server_location` and `azure_blob_storage_location` must be specified. + * `encoding` - (Required) The encoding format for the file. --- @@ -99,6 +103,16 @@ A `http_server_location` block supports the following: * `filename` - (Required) The filename of the file on the web server. +--- + +An `azure_blob_storage_location` block supports the following: + +* `container` - (Required) The container on the Azure Blob Storage Account hosting the file. + +* `path` - (Required) The folder path to the file in the blob container. + +* `filename` - (Required) The filename of the file in the blob container. + ## Attributes Reference The following attributes are exported: