azurerm_data_factory_dataset_delimited_text - fix issue with property azure_blob_storage_account #7953

Merged: 4 commits, Aug 11, 2020
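
In short, the dataset resources declare the block as `azure_blob_storage_location` but referenced it inconsistently elsewhere: the read functions wrote the flattened value back under `http_server_location`, and `expandDataFactoryDatasetLocation` looked up the non-existent `blob_storage_location` key, so a configured blob location never round-tripped. With those references corrected, a configuration along the following lines is expected to work. This is a minimal sketch mirroring the acceptance test added below; all resource names and values are illustrative.

resource "azurerm_data_factory_dataset_delimited_text" "example" {
  name                = "example-dataset"
  resource_group_name = azurerm_resource_group.example.name
  data_factory_name   = azurerm_data_factory.example.name
  linked_service_name = azurerm_data_factory_linked_service_azure_blob_storage.example.name

  # The block this PR wires up end-to-end (schema ConflictsWith, expand, and read).
  azure_blob_storage_location {
    container = azurerm_storage_container.example.name
    path      = "foo/bar/"
    filename  = "foo.txt"
  }

  column_delimiter    = ","
  row_delimiter       = "NEW"
  encoding            = "UTF-8"
  first_row_as_header = true
}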
@@ -64,7 +64,7 @@ func resourceArmDataFactoryDatasetDelimitedText() *schema.Resource {
Type: schema.TypeList,
MaxItems: 1,
Optional: true,
//ConflictsWith: []string{"sftp_server_location", "file_server_location", "s3_location", "blob_storage_location"},
//ConflictsWith: []string{"sftp_server_location", "file_server_location", "s3_location", "azure_blob_storage_location"},
ConflictsWith: []string{"azure_blob_storage_location"},
Elem: &schema.Resource{
Schema: map[string]*schema.Schema{
@@ -92,7 +92,7 @@ func resourceArmDataFactoryDatasetDelimitedText() *schema.Resource {
Type: schema.TypeList,
MaxItems: 1,
Optional: true,
//ConflictsWith: []string{"sftp_server_location", "file_server_location", "s3_location", "blob_storage_location"},
//ConflictsWith: []string{"sftp_server_location", "file_server_location", "s3_location", "azure_blob_storage_location"},
ConflictsWith: []string{"http_server_location"},
Elem: &schema.Resource{
Schema: map[string]*schema.Schema{
@@ -266,7 +266,7 @@ func resourceArmDataFactoryDatasetDelimitedTextCreateUpdate(d *schema.ResourceDa

location := expandDataFactoryDatasetLocation(d)
if location == nil {
return fmt.Errorf("One of `http_server_location`, `blob_storage_location` must be specified to create a DataFactory Delimited Text Dataset")
return fmt.Errorf("One of `http_server_location`, `azure_blob_storage_location` must be specified to create a DataFactory Delimited Text Dataset")
}

delimited_textDatasetProperties := datafactory.DelimitedTextDatasetTypeProperties{
@@ -404,8 +404,8 @@ func resourceArmDataFactoryDatasetDelimitedTextRead(d *schema.ResourceData, meta
}
}
if azureBlobStorageLocation, ok := properties.Location.AsAzureBlobStorageLocation(); ok {
if err := d.Set("http_server_location", flattenDataFactoryDatasetAzureBlobStorageLocation(azureBlobStorageLocation)); err != nil {
return fmt.Errorf("Error setting `http_server_location` for Data Factory Delimited Text Dataset %s", err)
if err := d.Set("azure_blob_storage_location", flattenDataFactoryDatasetAzureBlobStorageLocation(azureBlobStorageLocation)); err != nil {
return fmt.Errorf("Error setting `azure_blob_storage_location` for Data Factory Delimited Text Dataset %s", err)
}
}

@@ -499,7 +499,7 @@ func expandDataFactoryDatasetLocation(d *schema.ResourceData) datafactory.BasicD
return expandDataFactoryDatasetHttpServerLocation(d)
}

if _, ok := d.GetOk("blob_storage_location"); ok {
if _, ok := d.GetOk("azure_blob_storage_location"); ok {
return expandDataFactoryDatasetAzureBlobStorageLocation(d)
}

@@ -520,7 +520,7 @@ func expandDataFactoryDatasetHttpServerLocation(d *schema.ResourceData) datafact
return httpServerLocation
}
func expandDataFactoryDatasetAzureBlobStorageLocation(d *schema.ResourceData) datafactory.BasicDatasetLocation {
props := d.Get("blob_storage_location").([]interface{})[0].(map[string]interface{})
props := d.Get("azure_blob_storage_location").([]interface{})[0].(map[string]interface{})
container := props["container"].(string)
path := props["path"].(string)
filename := props["filename"].(string)
@@ -12,61 +12,6 @@ import (
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils"
)

func TestAccAzureRMDataFactoryDatasetDelimitedText_basic(t *testing.T) {
data := acceptance.BuildTestData(t, "azurerm_data_factory_dataset_delimited_text", "test")

resource.ParallelTest(t, resource.TestCase{
PreCheck: func() { acceptance.PreCheck(t) },
Providers: acceptance.SupportedProviders,
CheckDestroy: testCheckAzureRMDataFactoryDatasetDelimitedTextDestroy,
Steps: []resource.TestStep{
{
Config: testAccAzureRMDataFactoryDatasetDelimitedText_basic(data),
Check: resource.ComposeTestCheckFunc(
testCheckAzureRMDataFactoryDatasetDelimitedTextExists(data.ResourceName),
),
},
data.ImportStep(),
},
})
}

func TestAccAzureRMDataFactoryDatasetDelimitedText_update(t *testing.T) {
data := acceptance.BuildTestData(t, "azurerm_data_factory_dataset_delimited_text", "test")

resource.ParallelTest(t, resource.TestCase{
PreCheck: func() { acceptance.PreCheck(t) },
Providers: acceptance.SupportedProviders,
CheckDestroy: testCheckAzureRMDataFactoryDatasetDelimitedTextDestroy,
Steps: []resource.TestStep{
{
Config: testAccAzureRMDataFactoryDatasetDelimitedText_update1(data),
Check: resource.ComposeTestCheckFunc(
testCheckAzureRMDataFactoryDatasetDelimitedTextExists(data.ResourceName),
resource.TestCheckResourceAttr(data.ResourceName, "parameters.%", "2"),
resource.TestCheckResourceAttr(data.ResourceName, "annotations.#", "3"),
resource.TestCheckResourceAttr(data.ResourceName, "schema_column.#", "1"),
resource.TestCheckResourceAttr(data.ResourceName, "additional_properties.%", "2"),
resource.TestCheckResourceAttr(data.ResourceName, "description", "test description"),
),
},
data.ImportStep(),
{
Config: testAccAzureRMDataFactoryDatasetDelimitedText_update2(data),
Check: resource.ComposeTestCheckFunc(
testCheckAzureRMDataFactoryDatasetDelimitedTextExists(data.ResourceName),
resource.TestCheckResourceAttr(data.ResourceName, "parameters.%", "3"),
resource.TestCheckResourceAttr(data.ResourceName, "annotations.#", "2"),
resource.TestCheckResourceAttr(data.ResourceName, "schema_column.#", "2"),
resource.TestCheckResourceAttr(data.ResourceName, "additional_properties.%", "1"),
resource.TestCheckResourceAttr(data.ResourceName, "description", "test description 2"),
),
},
data.ImportStep(),
},
})
}

func testCheckAzureRMDataFactoryDatasetDelimitedTextExists(name string) resource.TestCheckFunc {
return func(s *terraform.State) error {
client := acceptance.AzureProvider.Meta().(*clients.Client).DataFactory.DatasetClient
@@ -125,7 +70,62 @@ func testCheckAzureRMDataFactoryDatasetDelimitedTextDestroy(s *terraform.State)
return nil
}

func testAccAzureRMDataFactoryDatasetDelimitedText_basic(data acceptance.TestData) string {
func TestAccAzureRMDataFactoryDatasetDelimitedText_http(t *testing.T) {
data := acceptance.BuildTestData(t, "azurerm_data_factory_dataset_delimited_text", "test")

resource.ParallelTest(t, resource.TestCase{
PreCheck: func() { acceptance.PreCheck(t) },
Providers: acceptance.SupportedProviders,
CheckDestroy: testCheckAzureRMDataFactoryDatasetDelimitedTextDestroy,
Steps: []resource.TestStep{
{
Config: testAccAzureRMDataFactoryDatasetDelimitedText_http(data),
Check: resource.ComposeTestCheckFunc(
testCheckAzureRMDataFactoryDatasetDelimitedTextExists(data.ResourceName),
),
},
data.ImportStep(),
},
})
}

func TestAccAzureRMDataFactoryDatasetDelimitedText_http_update(t *testing.T) {
data := acceptance.BuildTestData(t, "azurerm_data_factory_dataset_delimited_text", "test")

resource.ParallelTest(t, resource.TestCase{
PreCheck: func() { acceptance.PreCheck(t) },
Providers: acceptance.SupportedProviders,
CheckDestroy: testCheckAzureRMDataFactoryDatasetDelimitedTextDestroy,
Steps: []resource.TestStep{
{
Config: testAccAzureRMDataFactoryDatasetDelimitedText_http_update1(data),
Check: resource.ComposeTestCheckFunc(
testCheckAzureRMDataFactoryDatasetDelimitedTextExists(data.ResourceName),
resource.TestCheckResourceAttr(data.ResourceName, "parameters.%", "2"),
resource.TestCheckResourceAttr(data.ResourceName, "annotations.#", "3"),
resource.TestCheckResourceAttr(data.ResourceName, "schema_column.#", "1"),
resource.TestCheckResourceAttr(data.ResourceName, "additional_properties.%", "2"),
resource.TestCheckResourceAttr(data.ResourceName, "description", "test description"),
),
},
data.ImportStep(),
{
Config: testAccAzureRMDataFactoryDatasetDelimitedText_http_update2(data),
Check: resource.ComposeTestCheckFunc(
testCheckAzureRMDataFactoryDatasetDelimitedTextExists(data.ResourceName),
resource.TestCheckResourceAttr(data.ResourceName, "parameters.%", "3"),
resource.TestCheckResourceAttr(data.ResourceName, "annotations.#", "2"),
resource.TestCheckResourceAttr(data.ResourceName, "schema_column.#", "2"),
resource.TestCheckResourceAttr(data.ResourceName, "additional_properties.%", "1"),
resource.TestCheckResourceAttr(data.ResourceName, "description", "test description 2"),
),
},
data.ImportStep(),
},
})
}

func testAccAzureRMDataFactoryDatasetDelimitedText_http(data acceptance.TestData) string {
return fmt.Sprintf(`
provider "azurerm" {
features {}
@@ -174,7 +174,7 @@ resource "azurerm_data_factory_dataset_delimited_text" "test" {
`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger)
}

func testAccAzureRMDataFactoryDatasetDelimitedText_update1(data acceptance.TestData) string {
func testAccAzureRMDataFactoryDatasetDelimitedText_http_update1(data acceptance.TestData) string {
return fmt.Sprintf(`
provider "azurerm" {
features {}
@@ -243,7 +243,7 @@ resource "azurerm_data_factory_dataset_delimited_text" "test" {
`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger)
}

func testAccAzureRMDataFactoryDatasetDelimitedText_update2(data acceptance.TestData) string {
func testAccAzureRMDataFactoryDatasetDelimitedText_http_update2(data acceptance.TestData) string {
return fmt.Sprintf(`
provider "azurerm" {
features {}
@@ -317,3 +317,85 @@ resource "azurerm_data_factory_dataset_delimited_text" "test" {
}
`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger)
}

func TestAccAzureRMDataFactoryDatasetDelimitedText_blob(t *testing.T) {
data := acceptance.BuildTestData(t, "azurerm_data_factory_dataset_delimited_text", "test")

resource.ParallelTest(t, resource.TestCase{
PreCheck: func() { acceptance.PreCheck(t) },
Providers: acceptance.SupportedProviders,
CheckDestroy: testCheckAzureRMDataFactoryDatasetDelimitedTextDestroy,
Steps: []resource.TestStep{
{
Config: testAccAzureRMDataFactoryDatasetDelimitedText_blob(data),
Check: resource.ComposeTestCheckFunc(
testCheckAzureRMDataFactoryDatasetDelimitedTextExists(data.ResourceName),
),
},
data.ImportStep(),
},
})
}

func testAccAzureRMDataFactoryDatasetDelimitedText_blob(data acceptance.TestData) string {
return fmt.Sprintf(`
provider "azurerm" {
features {}
}

resource "azurerm_resource_group" "test" {
name = "acctestRG-df-%d"
location = "%s"
}

resource "azurerm_storage_account" "test" {
name = "acctestdf%s"
location = azurerm_resource_group.test.location
resource_group_name = azurerm_resource_group.test.name
account_tier = "Standard"
account_replication_type = "GRS"
}

resource "azurerm_storage_container" "test" {
name = "content"
storage_account_name = azurerm_storage_account.test.name
container_access_type = "private"
}

resource "azurerm_data_factory" "test" {
name = "acctestdf%d"
location = azurerm_resource_group.test.location
resource_group_name = azurerm_resource_group.test.name
}


resource "azurerm_data_factory_linked_service_azure_blob_storage" "test" {
name = "acctestlsblob%d"
resource_group_name = azurerm_resource_group.test.name
data_factory_name = azurerm_data_factory.test.name
connection_string = azurerm_storage_account.test.primary_connection_string
}

resource "azurerm_data_factory_dataset_delimited_text" "test" {
name = "acctestds%d"
resource_group_name = azurerm_resource_group.test.name
data_factory_name = azurerm_data_factory.test.name
linked_service_name = azurerm_data_factory_linked_service_azure_blob_storage.test.name

azure_blob_storage_location {
container = azurerm_storage_container.test.name
path = "foo/bar/"
filename = "foo.txt"
}

column_delimiter = ","
row_delimiter = "NEW"
encoding = "UTF-8"
quote_character = "x"
escape_character = "f"
first_row_as_header = true
null_value = "NULL"

}
`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomInteger, data.RandomInteger, data.RandomInteger)
}
@@ -64,7 +64,7 @@ func resourceArmDataFactoryDatasetJSON() *schema.Resource {
Type: schema.TypeList,
MaxItems: 1,
Optional: true,
//ConflictsWith: []string{"sftp_server_location", "file_server_location", "s3_location", "blob_storage_location"},
//ConflictsWith: []string{"sftp_server_location", "file_server_location", "s3_location", "azure_blob_storage_location"},
ConflictsWith: []string{"azure_blob_storage_location"},
Elem: &schema.Resource{
Schema: map[string]*schema.Schema{
@@ -92,7 +92,7 @@ func resourceArmDataFactoryDatasetJSON() *schema.Resource {
Type: schema.TypeList,
MaxItems: 1,
Optional: true,
//ConflictsWith: []string{"sftp_server_location", "file_server_location", "s3_location", "blob_storage_location"},
//ConflictsWith: []string{"sftp_server_location", "file_server_location", "s3_location", "azure_blob_storage_location"},
ConflictsWith: []string{"http_server_location"},
Elem: &schema.Resource{
Schema: map[string]*schema.Schema{
@@ -225,7 +225,7 @@ func resourceArmDataFactoryDatasetJSONCreateUpdate(d *schema.ResourceData, meta

location := expandDataFactoryDatasetLocation(d)
if location == nil {
return fmt.Errorf("One of `http_server_location`, `blob_storage_location` must be specified to create a DataFactory Delimited Text Dataset")
return fmt.Errorf("One of `http_server_location`, `azure_blob_storage_location` must be specified to create a DataFactory Delimited Text Dataset")
}

jsonDatasetProperties := datafactory.JSONDatasetTypeProperties{
@@ -357,8 +357,8 @@ func resourceArmDataFactoryDatasetJSONRead(d *schema.ResourceData, meta interfac
}
}
if azureBlobStorageLocation, ok := properties.Location.AsAzureBlobStorageLocation(); ok {
if err := d.Set("http_server_location", flattenDataFactoryDatasetAzureBlobStorageLocation(azureBlobStorageLocation)); err != nil {
return fmt.Errorf("Error setting `http_server_location` for Data Factory Delimited Text Dataset %s", err)
if err := d.Set("azure_blob_storage_location", flattenDataFactoryDatasetAzureBlobStorageLocation(azureBlobStorageLocation)); err != nil {
return fmt.Errorf("Error setting `azure_blob_storage_location` for Data Factory Delimited Text Dataset %s", err)
}
}

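
The JSON dataset resource above receives the same corrections, so the blob location block should round-trip there as well. A minimal sketch, under the assumption that the resource is registered as `azurerm_data_factory_dataset_json` and accepts the same `container`/`path`/`filename` attributes as the delimited text resource (names and values illustrative):

resource "azurerm_data_factory_dataset_json" "example" {
  name                = "example-json-dataset"
  resource_group_name = azurerm_resource_group.example.name
  data_factory_name   = azurerm_data_factory.example.name
  linked_service_name = azurerm_data_factory_linked_service_azure_blob_storage.example.name

  # Assumed to mirror the delimited text resource's location block shown above.
  azure_blob_storage_location {
    container = azurerm_storage_container.example.name
    path      = "foo/bar/"
    filename  = "foo.json"
  }
}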