New Resource - azurerm_data_factory_linked_service_blob_[blob|storage|sftp] #6366

Merged Jul 8, 2020 · 30 commits
0e95fb1  web and sftp data factory resources (Apr 6, 2020)
2cd1cbf  fixed accTests (Apr 6, 2020)
833487e  fixed accTests (Apr 6, 2020)
7b3cc83  removed changes (Apr 6, 2020)
eea40a2  added some extra datasets / linked services (Apr 17, 2020)
77d07c6  renamed azure blob storage linked service (Apr 17, 2020)
cb080e1  code review (Apr 17, 2020)
8e5bdff  linrest (Apr 17, 2020)
85c978a  cleanup on acc tests (Apr 20, 2020)
ab42d0d  Merge branch 'master' of https://github.com/terraform-providers/terra… (Apr 20, 2020)
2d4c7f7  Update azurerm/internal/services/datafactory/resource_arm_data_factor… (tracypholmes, May 15, 2020)
fdf6386  Update azurerm/internal/services/datafactory/tests/resource_arm_data_… (tracypholmes, May 15, 2020)
0ea12fa  Update azurerm/internal/services/datafactory/resource_arm_data_factor… (tracypholmes, May 15, 2020)
644737c  Update azurerm/internal/services/datafactory/resource_arm_data_factor… (tracypholmes, May 15, 2020)
395cf34  Updates `connection_string` to `Sensitive` (tracypholmes, May 15, 2020)
404321d  Removes feature flag (tracypholmes, May 15, 2020)
8b3e8ca  Updates all tests' resource group with `df` (tracypholmes, May 15, 2020)
6661ba8  Merge branch 'master' of https://github.com/markti/terraform-provider… (May 20, 2020)
b195f8f  Merge branch 'master' of https://github.com/terraform-providers/terra… (May 20, 2020)
82e0fbf  updated all documentation for datafactory res (May 21, 2020)
dfc0d43  dropped the azurerm_ prefix to acceptance tests (May 21, 2020)
a0e38bb  add data import step to acc tests (May 21, 2020)
279613f  basic auth / cosmos endpoint (Jun 20, 2020)
d4c3a66  Merge branch 'master' of https://github.com/terraform-providers/terra… (Jun 20, 2020)
94a12ff  updates (Jun 21, 2020)
9810d2f  update for website lint (Jun 21, 2020)
17704a5  added azure file storage and az fn (Jun 22, 2020)
c6a704f  added an additional update test case (Jun 22, 2020)
7cc55b9  rename some files & move tests to service package (katbyte, Jul 8, 2020)
3b86f3f  Merge branch 'master' into master-6366 (katbyte, Jul 8, 2020)
@@ -1,4 +1,4 @@
-package tests
+package datafactory_test
 
 import (
 	"fmt"
@@ -0,0 +1,347 @@
package datafactory

import (
"fmt"
"log"
"time"

"github.com/Azure/azure-sdk-for-go/services/datafactory/mgmt/2018-06-01/datafactory"
"github.com/hashicorp/terraform-plugin-sdk/helper/schema"
"github.com/hashicorp/terraform-plugin-sdk/helper/validation"
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure"
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf"
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/validate"
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients"
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts"
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils"
)

func resourceArmDataFactoryDatasetAzureBlob() *schema.Resource {
return &schema.Resource{
Create: resourceArmDataFactoryDatasetAzureBlobCreateUpdate,
Read: resourceArmDataFactoryDatasetAzureBlobRead,
Update: resourceArmDataFactoryDatasetAzureBlobCreateUpdate,
Delete: resourceArmDataFactoryDatasetAzureBlobDelete,

Importer: &schema.ResourceImporter{
State: schema.ImportStatePassthrough,
},

Timeouts: &schema.ResourceTimeout{
Create: schema.DefaultTimeout(30 * time.Minute),
Read: schema.DefaultTimeout(5 * time.Minute),
Update: schema.DefaultTimeout(30 * time.Minute),
Delete: schema.DefaultTimeout(30 * time.Minute),
},

Schema: map[string]*schema.Schema{
"name": {
Type: schema.TypeString,
Required: true,
ForceNew: true,
ValidateFunc: validateAzureRMDataFactoryLinkedServiceDatasetName,
},

"data_factory_name": {
Type: schema.TypeString,
Required: true,
ForceNew: true,
ValidateFunc: validate.DataFactoryName(),
},

// There's a bug in the Azure API where this is returned in lower-case
// BUG: https://github.com/Azure/azure-rest-api-specs/issues/5788
"resource_group_name": azure.SchemaResourceGroupNameDiffSuppress(),

"linked_service_name": {
Type: schema.TypeString,
Required: true,
ValidateFunc: validation.StringIsNotEmpty,
},

// Blob Storage Specific field
"path": {
Type: schema.TypeString,
Optional: true,
ValidateFunc: validation.StringIsNotEmpty,
},

// Blob Storage Specific field
"filename": {
Type: schema.TypeString,
Optional: true,
ValidateFunc: validation.StringIsNotEmpty,
},

"parameters": {
Type: schema.TypeMap,
Optional: true,
Elem: &schema.Schema{
Type: schema.TypeString,
},
},

"description": {
Type: schema.TypeString,
Optional: true,
ValidateFunc: validation.StringIsNotEmpty,
},

"annotations": {
Type: schema.TypeList,
Optional: true,
Elem: &schema.Schema{
Type: schema.TypeString,
},
},

"folder": {
Type: schema.TypeString,
Optional: true,
ValidateFunc: validation.StringIsNotEmpty,
},

"additional_properties": {
Type: schema.TypeMap,
Optional: true,
Elem: &schema.Schema{
Type: schema.TypeString,
},
},

"schema_column": {
Type: schema.TypeList,
Optional: true,
Elem: &schema.Resource{
Schema: map[string]*schema.Schema{
"name": {
Type: schema.TypeString,
Required: true,
ValidateFunc: validation.StringIsNotEmpty,
},
"type": {
Type: schema.TypeString,
Optional: true,
ValidateFunc: validation.StringInSlice([]string{
"Byte",
"Byte[]",
"Boolean",
"Date",
"DateTime",
"DateTimeOffset",
"Decimal",
"Double",
"Guid",
"Int16",
"Int32",
"Int64",
"Single",
"String",
"TimeSpan",
}, false),
},
"description": {
Type: schema.TypeString,
Optional: true,
ValidateFunc: validation.StringIsNotEmpty,
},
},
},
},
},
}
}

func resourceArmDataFactoryDatasetAzureBlobCreateUpdate(d *schema.ResourceData, meta interface{}) error {
client := meta.(*clients.Client).DataFactory.DatasetClient
ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d)
defer cancel()

name := d.Get("name").(string)
dataFactoryName := d.Get("data_factory_name").(string)
resourceGroup := d.Get("resource_group_name").(string)

if d.IsNewResource() {
existing, err := client.Get(ctx, resourceGroup, dataFactoryName, name, "")
if err != nil {
if !utils.ResponseWasNotFound(existing.Response) {
return fmt.Errorf("Error checking for presence of existing Data Factory Dataset Azure Blob %q (Data Factory %q / Resource Group %q): %s", name, dataFactoryName, resourceGroup, err)
}
}

if existing.ID != nil && *existing.ID != "" {
return tf.ImportAsExistsError("azurerm_data_factory_dataset_azure_blob", *existing.ID)
}
}

linkedServiceName := d.Get("linked_service_name").(string)
linkedServiceType := "LinkedServiceReference"
linkedService := &datafactory.LinkedServiceReference{
ReferenceName: &linkedServiceName,
Type: &linkedServiceType,
}

description := d.Get("description").(string)

azureBlobDataset := datafactory.AzureBlobDataset{
AzureBlobDatasetTypeProperties: &datafactory.AzureBlobDatasetTypeProperties{
FolderPath: d.Get("path").(string),
FileName: d.Get("filename").(string),
},
LinkedServiceName: linkedService,
Description: &description,
}

if v, ok := d.GetOk("folder"); ok {
name := v.(string)
azureBlobDataset.Folder = &datafactory.DatasetFolder{
Name: &name,
}
}

if v, ok := d.GetOk("parameters"); ok {
azureBlobDataset.Parameters = expandDataFactoryParameters(v.(map[string]interface{}))
}

if v, ok := d.GetOk("annotations"); ok {
annotations := v.([]interface{})
azureBlobDataset.Annotations = &annotations
}

if v, ok := d.GetOk("additional_properties"); ok {
azureBlobDataset.AdditionalProperties = v.(map[string]interface{})
}

if v, ok := d.GetOk("schema_column"); ok {
azureBlobDataset.Structure = expandDataFactoryDatasetStructure(v.([]interface{}))
}

datasetType := string(datafactory.TypeAzureBlob)
dataset := datafactory.DatasetResource{
Properties: &azureBlobDataset,
Type: &datasetType,
}

if _, err := client.CreateOrUpdate(ctx, resourceGroup, dataFactoryName, name, dataset, ""); err != nil {
return fmt.Errorf("Error creating/updating Data Factory Dataset Azure Blob %q (Data Factory %q / Resource Group %q): %s", name, dataFactoryName, resourceGroup, err)
}

resp, err := client.Get(ctx, resourceGroup, dataFactoryName, name, "")
if err != nil {
return fmt.Errorf("Error retrieving Data Factory Dataset Azure Blob %q (Data Factory %q / Resource Group %q): %s", name, dataFactoryName, resourceGroup, err)
}

if resp.ID == nil {
return fmt.Errorf("Cannot read Data Factory Dataset Azure Blob %q (Data Factory %q / Resource Group %q): %s", name, dataFactoryName, resourceGroup, err)
}

d.SetId(*resp.ID)

return resourceArmDataFactoryDatasetAzureBlobRead(d, meta)
}

func resourceArmDataFactoryDatasetAzureBlobRead(d *schema.ResourceData, meta interface{}) error {
client := meta.(*clients.Client).DataFactory.DatasetClient
ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d)
defer cancel()

id, err := azure.ParseAzureResourceID(d.Id())
if err != nil {
return err
}
resourceGroup := id.ResourceGroup
dataFactoryName := id.Path["factories"]
name := id.Path["datasets"]

resp, err := client.Get(ctx, resourceGroup, dataFactoryName, name, "")
if err != nil {
if utils.ResponseWasNotFound(resp.Response) {
d.SetId("")
return nil
}

return fmt.Errorf("Error retrieving Data Factory Dataset Azure Blob %q (Data Factory %q / Resource Group %q): %s", name, dataFactoryName, resourceGroup, err)
}

d.Set("name", resp.Name)
d.Set("resource_group_name", resourceGroup)
d.Set("data_factory_name", dataFactoryName)

azureBlobTable, ok := resp.Properties.AsAzureBlobDataset()
if !ok {
return fmt.Errorf("Error classifiying Data Factory Dataset Azure Blob %q (Data Factory %q / Resource Group %q): Expected: %q Received: %q", name, dataFactoryName, resourceGroup, datafactory.TypeRelationalTable, *resp.Type)
}

d.Set("additional_properties", azureBlobTable.AdditionalProperties)

if azureBlobTable.Description != nil {
d.Set("description", azureBlobTable.Description)
}

parameters := flattenDataFactoryParameters(azureBlobTable.Parameters)
if err := d.Set("parameters", parameters); err != nil {
return fmt.Errorf("Error setting `parameters`: %+v", err)
}

annotations := flattenDataFactoryAnnotations(azureBlobTable.Annotations)
if err := d.Set("annotations", annotations); err != nil {
return fmt.Errorf("Error setting `annotations`: %+v", err)
}

if linkedService := azureBlobTable.LinkedServiceName; linkedService != nil {
if linkedService.ReferenceName != nil {
d.Set("linked_service_name", linkedService.ReferenceName)
}
}

if properties := azureBlobTable.AzureBlobDatasetTypeProperties; properties != nil {
filename, ok := properties.FileName.(string)
if !ok {
log.Printf("[DEBUG] Skipping `filename` since it's not a string")
} else {
d.Set("filename", filename)
}
path, ok := properties.FolderPath.(string)
if !ok {
log.Printf("[DEBUG] Skipping `path` since it's not a string")
} else {
d.Set("path", path)
}
}

if folder := azureBlobTable.Folder; folder != nil {
if folder.Name != nil {
d.Set("folder", folder.Name)
}
}

structureColumns := flattenDataFactoryStructureColumns(azureBlobTable.Structure)
if err := d.Set("schema_column", structureColumns); err != nil {
return fmt.Errorf("Error setting `schema_column`: %+v", err)
}

return nil
}

func resourceArmDataFactoryDatasetAzureBlobDelete(d *schema.ResourceData, meta interface{}) error {
client := meta.(*clients.Client).DataFactory.DatasetClient
ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d)
defer cancel()

id, err := azure.ParseAzureResourceID(d.Id())
if err != nil {
return err
}
resourceGroup := id.ResourceGroup
dataFactoryName := id.Path["factories"]
name := id.Path["datasets"]

response, err := client.Delete(ctx, resourceGroup, dataFactoryName, name)
if err != nil {
if !utils.ResponseWasNotFound(response) {
return fmt.Errorf("Error deleting Data Factory Dataset Azure Blob %q (Data Factory %q / Resource Group %q): %s", name, dataFactoryName, resourceGroup, err)
}
}

return nil
}
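
For context, a minimal sketch of how the new dataset resource could be declared, based on the schema above. The resource labels, the `path`/`filename` values, and the assumption that the companion linked service from this PR is exposed as azurerm_data_factory_linked_service_azure_blob_storage (with azurerm_resource_group.example, azurerm_data_factory.example, and the linked service defined elsewhere) are illustrative, not taken from the diff:

resource "azurerm_data_factory_dataset_azure_blob" "example" {
  name                = "example"
  resource_group_name = azurerm_resource_group.example.name
  data_factory_name   = azurerm_data_factory.example.name

  # References the linked service by name, matching the `linked_service_name` schema field above.
  linked_service_name = azurerm_data_factory_linked_service_azure_blob_storage.example.name

  # Blob Storage specific fields; both are Optional in the schema.
  path     = "example-container/example-path"
  filename = "example.csv"
}

Since the resource registers schema.ImportStatePassthrough and the Read function parses the ID via id.Path["factories"] and id.Path["datasets"], an existing dataset should be importable with an ID of the form:

terraform import azurerm_data_factory_dataset_azure_blob.example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/example/providers/Microsoft.DataFactory/factories/example/datasets/example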