diff --git a/azurerm/internal/services/storage/client/client.go b/azurerm/internal/services/storage/client/client.go index 8b781b5d9adea..553963416aeb0 100644 --- a/azurerm/internal/services/storage/client/client.go +++ b/azurerm/internal/services/storage/client/client.go @@ -27,6 +27,7 @@ type Client struct { ManagementPoliciesClient storage.ManagementPoliciesClient BlobServicesClient storage.BlobServicesClient CachesClient *storagecache.CachesClient + StorageTargetsClient *storagecache.StorageTargetsClient BlobAccountsClient *accounts.Client environment az.Environment @@ -49,6 +50,9 @@ func NewClient(options *common.ClientOptions) *Client { cachesClient := storagecache.NewCachesClientWithBaseURI(options.ResourceManagerEndpoint, options.SubscriptionId) options.ConfigureClient(&cachesClient.Client, options.ResourceManagerAuthorizer) + storageTargetsClient := storagecache.NewStorageTargetsClientWithBaseURI(options.ResourceManagerEndpoint, options.SubscriptionId) + options.ConfigureClient(&storageTargetsClient.Client, options.ResourceManagerAuthorizer) + blobAccountsClient := accounts.NewWithEnvironment(options.Environment) options.ConfigureClient(&blobAccountsClient.Client, options.StorageAuthorizer) @@ -60,6 +64,7 @@ func NewClient(options *common.ClientOptions) *Client { ManagementPoliciesClient: managementPoliciesClient, BlobServicesClient: blobServicesClient, CachesClient: &cachesClient, + StorageTargetsClient: &storageTargetsClient, BlobAccountsClient: &blobAccountsClient, environment: options.Environment, } diff --git a/azurerm/internal/services/storage/parsers/hpc_cache_target.go b/azurerm/internal/services/storage/parsers/hpc_cache_target.go new file mode 100644 index 0000000000000..d1d490de33d66 --- /dev/null +++ b/azurerm/internal/services/storage/parsers/hpc_cache_target.go @@ -0,0 +1,38 @@ +package parsers + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type HPCCacheTargetId struct { + 
ResourceGroup string + Cache string + Name string +} + +func HPCCacheTargetID(input string) (*HPCCacheTargetId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, fmt.Errorf("[ERROR] Unable to parse HPC Cache Target ID %q: %+v", input, err) + } + + target := HPCCacheTargetId{ + ResourceGroup: id.ResourceGroup, + } + + if target.Cache, err = id.PopSegment("caches"); err != nil { + return nil, err + } + + if target.Name, err = id.PopSegment("storageTargets"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &target, nil +} diff --git a/azurerm/internal/services/storage/parsers/hpc_cache_target_test.go b/azurerm/internal/services/storage/parsers/hpc_cache_target_test.go new file mode 100644 index 0000000000000..11a055cea2567 --- /dev/null +++ b/azurerm/internal/services/storage/parsers/hpc_cache_target_test.go @@ -0,0 +1,89 @@ +package parsers + +import ( + "testing" +) + +func TestHPCCacheTargetID(t *testing.T) { + testData := []struct { + Name string + Input string + Error bool + Expect *HPCCacheTargetId + }{ + { + Name: "Empty", + Input: "", + Error: true, + }, + { + Name: "No Resource Groups Segment", + Input: "/subscriptions/00000000-0000-0000-0000-000000000000", + Error: true, + }, + { + Name: "No Resource Groups Value", + Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", + Error: true, + }, + { + Name: "Resource Group ID", + Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/", + Error: true, + }, + { + Name: "Missing Cache Value", + Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.StorageCache/caches/", + Error: true, + }, + { + Name: "With Cache Value", + Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.StorageCache/caches/cache1", + Error: true, + }, + { + Name: "Missing 
Storage Target Value", + Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.StorageCache/caches/cache1/storageTargets", + Error: true, + }, + { + Name: "With Storage Target Value", + Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.StorageCache/caches/cache1/storageTargets/target1", + Expect: &HPCCacheTargetId{ + ResourceGroup: "resGroup1", + Cache: "cache1", + Name: "target1", + }, + }, + { + Name: "Wrong Casing", + Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.StorageCache/caches/cache1/StorageTargets/target1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Name) + + actual, err := HPCCacheTargetID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expected a value but got an error: %s", err) + } + + if actual.ResourceGroup != v.Expect.ResourceGroup { + t.Fatalf("Expected %q but got %q for Resource Group", v.Expect.ResourceGroup, actual.ResourceGroup) + } + + if actual.Cache != v.Expect.Cache { + t.Fatalf("Expected %q but got %q for Cache", v.Expect.Cache, actual.Cache) + } + + if actual.Name != v.Expect.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expect.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/storage/registration.go b/azurerm/internal/services/storage/registration.go index 9f2f8f20de077..f78ca8d0d4758 100644 --- a/azurerm/internal/services/storage/registration.go +++ b/azurerm/internal/services/storage/registration.go @@ -33,6 +33,7 @@ func (r Registration) SupportedDataSources() map[string]*schema.Resource { func (r Registration) SupportedResources() map[string]*schema.Resource { return map[string]*schema.Resource{ "azurerm_hpc_cache": resourceArmHPCCache(), + "azurerm_hpc_cache_blob_target": resourceArmHPCCacheBlobTarget(), "azurerm_storage_account": resourceArmStorageAccount(), 
"azurerm_storage_account_customer_managed_key": resourceArmStorageAccountCustomerManagedKey(), "azurerm_storage_account_network_rules": resourceArmStorageAccountNetworkRules(), diff --git a/azurerm/internal/services/storage/resource_arm_hpc_cache_blob_target.go b/azurerm/internal/services/storage/resource_arm_hpc_cache_blob_target.go new file mode 100644 index 0000000000000..bb7b7e72ad29f --- /dev/null +++ b/azurerm/internal/services/storage/resource_arm_hpc_cache_blob_target.go @@ -0,0 +1,272 @@ +package storage + +import ( + "context" + "fmt" + "log" + "time" + + "github.com/Azure/azure-sdk-for-go/services/storagecache/mgmt/2019-11-01/storagecache" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/features" + storage "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/storage/client" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/storage/parsers" + storageValidate "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/storage/validate" + azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" + "github.com/tombuildsstuff/giovanni/storage/2018-11-09/blob/containers" +) + +func resourceArmHPCCacheBlobTarget() *schema.Resource { + return &schema.Resource{ + Create: resourceArmHPCCacheBlobTargetCreateOrUpdate, + Update: 
resourceArmHPCCacheBlobTargetCreateOrUpdate, + Read: resourceArmHPCCacheBlobTargetRead, + Delete: resourceArmHPCCacheBlobTargetDelete, + + Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { + _, err := parsers.HPCCacheTargetID(id) + return err + }), + + Timeouts: &schema.ResourceTimeout{ + Create: schema.DefaultTimeout(30 * time.Minute), + Read: schema.DefaultTimeout(5 * time.Minute), + Update: schema.DefaultTimeout(30 * time.Minute), + Delete: schema.DefaultTimeout(30 * time.Minute), + }, + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: storageValidate.HPCCacheTargetName, + }, + + "resource_group_name": azure.SchemaResourceGroupName(), + + "cache_name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + + "namespace_path": { + Type: schema.TypeString, + Required: true, + ValidateFunc: storageValidate.HPCCacheNamespacePath, + }, + + "storage_container_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + }, + } +} + +func resourceArmHPCCacheBlobTargetCreateOrUpdate(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Storage.StorageTargetsClient + ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) + defer cancel() + + log.Printf("[INFO] preparing arguments for Azure HPC Cache Blob Target creation.") + name := d.Get("name").(string) + resourceGroup := d.Get("resource_group_name").(string) + cache := d.Get("cache_name").(string) + + if features.ShouldResourcesBeImported() && d.IsNewResource() { + resp, err := client.Get(ctx, resourceGroup, cache, name) + if err != nil { + if !utils.ResponseWasNotFound(resp.Response) { + return fmt.Errorf("Error checking for existing HPC Cache Blob Target %q (Resource Group %q): %+v", name, resourceGroup, err) + } + } + + if 
!utils.ResponseWasNotFound(resp.Response) { + return tf.ImportAsExistsError("azurerm_hpc_cache_blob_target", *resp.ID) + } + } + + namespacePath := d.Get("namespace_path").(string) + containerId := d.Get("storage_container_id").(string) + + // We need to convert storage container id from the form used inside hashicorp azure storage sdk + // to the form used in original azure storage sdk, which is expected by storage target client. + + azureContainerId, err := toAzureStorageContianerID( + containerId, + meta.(*clients.Client).Account.SubscriptionId, + resourceGroup, + ) + if err != nil { + return fmt.Errorf("Error converting container id to azure container id (HPC Cache Blob Target %q, Resource Group %q, Cache %q): %w", name, resourceGroup, cache, err) + } + + // Construct parameters + namespaceJunction := []storagecache.NamespaceJunction{ + { + NamespacePath: &namespacePath, + TargetPath: utils.String("/"), + }, + } + param := &storagecache.StorageTarget{ + StorageTargetProperties: &storagecache.StorageTargetProperties{ + Junctions: &namespaceJunction, + TargetType: storagecache.StorageTargetTypeClfs, + Clfs: &storagecache.ClfsTarget{ + Target: &azureContainerId, + }, + }, + } + + future, err := client.CreateOrUpdate(ctx, resourceGroup, cache, name, param) + if err != nil { + return fmt.Errorf("Error creating HPC Cache Blob Target %q (Resource Group %q): %+v", name, resourceGroup, err) + } + + if err := future.WaitForCompletionRef(ctx, client.Client); err != nil { + return fmt.Errorf("Error waiting for creation of HPC Cache Blob Target %q (Resource Group %q): %+v", name, resourceGroup, err) + } + + read, err := client.Get(ctx, resourceGroup, cache, name) + if err != nil { + return fmt.Errorf("Error retrieving HPC Cache Blob Target %q (Resource Group %q): %+v", name, resourceGroup, err) + } + + if read.ID == nil { + return fmt.Errorf("Error retrieving HPC Cache Blob Target %q (Resource Group %q): `id` was nil", name, resourceGroup) + } + + d.SetId(*read.ID) + 
return resourceArmHPCCacheBlobTargetRead(d, meta) +} + +func resourceArmHPCCacheBlobTargetRead(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Storage.StorageTargetsClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parsers.HPCCacheTargetID(d.Id()) + if err != nil { + return err + } + + resp, err := client.Get(ctx, id.ResourceGroup, id.Cache, id.Name) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + log.Printf("[DEBUG] HPC Cache Blob Target %q was not found (Resource Group %q, Cache %q) - removing from state!", id.Name, id.ResourceGroup, id.Cache) + d.SetId("") + return nil + } + return fmt.Errorf("Error retrieving HPC Cache Blob Target %q (Resource Group %q, Cache %q): %+v", id.Name, id.ResourceGroup, id.Cache, err) + } + + d.Set("name", id.Name) + d.Set("resource_group_name", id.ResourceGroup) + d.Set("cache_name", id.Cache) + + if resp.StorageTargetProperties == nil { + return fmt.Errorf("Error retrieving HPC Cache Blob Target %q (Resource Group %q, Cache %q): `properties` was nil", id.Name, id.ResourceGroup, id.Cache) + } + props := *resp.StorageTargetProperties + + containerId := "" + if props.Clfs != nil && props.Clfs.Target != nil { + // Convert container id from azure form to the form used in current + // storage sdk package. + azureContainerId := *props.Clfs.Target + var err error + containerId, err = fromAzureStorageContainerID(ctx, meta.(*clients.Client).Storage, azureContainerId) + if err != nil { + return fmt.Errorf("Error converting container id from azure container id (HPC Cache Blob Target %q, Resource Group %q, Cache %q): %w", id.Name, id.ResourceGroup, id.Cache, err) + } + } + d.Set("storage_container_id", containerId) + + namespacePath := "" + // There is only one namespace path allowed for blob container storage target, + // which maps to the root path of it. 
+ if props.Junctions != nil && len(*props.Junctions) == 1 && (*props.Junctions)[0].NamespacePath != nil { + namespacePath = *(*props.Junctions)[0].NamespacePath + } + d.Set("namespace_path", namespacePath) + + return nil +} + +func resourceArmHPCCacheBlobTargetDelete(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Storage.StorageTargetsClient + ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parsers.HPCCacheTargetID(d.Id()) + if err != nil { + return err + } + + future, err := client.Delete(ctx, id.ResourceGroup, id.Cache, id.Name) + if err != nil { + return fmt.Errorf("Error deleting HPC Cache Blob Target %q (Resource Group %q, Cache %q): %+v", id.Name, id.ResourceGroup, id.Cache, err) + } + + if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { + return fmt.Errorf("Error waiting for deletion of HPC Cache Blob Target %q (Resource Group %q, Cache %q): %+v", id.Name, id.ResourceGroup, id.Cache, err) + } + + return nil +} + +// toAzureStorageContianerID converts container id from form used in "giovanni" +// package to form used in azure. +func toAzureStorageContianerID(id, subid, resgroup string) (string, error) { + idinfo, err := containers.ParseResourceID(id) + if err != nil { + return "", err + } + return fmt.Sprintf("/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Storage/storageAccounts/%s/blobServices/default/containers/%s", + subid, resgroup, idinfo.AccountName, idinfo.ContainerName), nil +} + +// fromAzureStorageContainerID converts container id from form used in azure +// to form used in "giovanni" package. 
+func fromAzureStorageContainerID(ctx context.Context, storageClient *storage.Client, input string) (string, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return "", err + } + var accountName, containerName string + + if accountName, err = id.PopSegment("storageAccounts"); err != nil { + return "", err + } + + if containerName, err = id.PopSegment("containers"); err != nil { + return "", err + } + + account, err := storageClient.FindAccount(ctx, accountName) + if err != nil { + return "", fmt.Errorf("Error retrieving Account %q for Container %q: %s", accountName, containerName, err) + } + if account == nil { + return "", fmt.Errorf("Unable to locate Storage Account %q!", accountName) + } + + client, err := storageClient.ContainersClient(ctx, *account) + if err != nil { + return "", fmt.Errorf("Error building Containers Client: %s", err) + } + + return client.GetResourceID(accountName, containerName), nil +} diff --git a/azurerm/internal/services/storage/tests/resource_arm_hpc_cache_blob_target_test.go b/azurerm/internal/services/storage/tests/resource_arm_hpc_cache_blob_target_test.go new file mode 100644 index 0000000000000..6cb12d61bc7d0 --- /dev/null +++ b/azurerm/internal/services/storage/tests/resource_arm_hpc_cache_blob_target_test.go @@ -0,0 +1,217 @@ +package tests + +import ( + "fmt" + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/storage/parsers" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/features" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func TestAccAzureRMHPCCacheBlobTarget_basic(t 
*testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hpc_cache_blob_target", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMHPCCacheBlobTargetDestroy, + Steps: []resource.TestStep{ + { + Config: testAccAzureRMHPCCacheBlobTarget_basic(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMHPCCacheBlobTargetExists(data.ResourceName), + ), + }, + data.ImportStep(), + }, + }) +} + +func TestAccAzureRMHPCCacheBlobTarget_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hpc_cache_blob_target", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMHPCCacheBlobTargetDestroy, + Steps: []resource.TestStep{ + { + Config: testAccAzureRMHPCCacheBlobTarget_basic(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMHPCCacheBlobTargetExists(data.ResourceName), + ), + }, + data.ImportStep(), + { + Config: testAccAzureRMHPCCacheBlobTarget_namespace(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMHPCCacheBlobTargetExists(data.ResourceName), + ), + }, + data.ImportStep(), + }, + }) +} + +func TestAccAzureRMHPCCacheBlobTarget_requiresImport(t *testing.T) { + if !features.ShouldResourcesBeImported() { + t.Skip("Skipping since resources aren't required to be imported") + return + } + + data := acceptance.BuildTestData(t, "azurerm_hpc_cache_blob_target", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMHPCCacheBlobTargetDestroy, + Steps: []resource.TestStep{ + { + Config: testAccAzureRMHPCCacheBlobTarget_basic(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMHPCCacheBlobTargetExists(data.ResourceName), + ), + }, + 
data.RequiresImportErrorStep(testAccAzureRMHPCCacheBlobTarget_requiresImport), + }, + }) +} + +func testCheckAzureRMHPCCacheBlobTargetExists(resourceName string) resource.TestCheckFunc { + return func(s *terraform.State) error { + rs, ok := s.RootModule().Resources[resourceName] + if !ok { + return fmt.Errorf("HPC Cache Blob Target not found: %s", resourceName) + } + + id, err := parsers.HPCCacheTargetID(rs.Primary.ID) + if err != nil { + return err + } + + client := acceptance.AzureProvider.Meta().(*clients.Client).Storage.StorageTargetsClient + ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext + + if resp, err := client.Get(ctx, id.ResourceGroup, id.Cache, id.Name); err != nil { + if utils.ResponseWasNotFound(resp.Response) { + return fmt.Errorf("Bad: HPC Cache Blob Target %q (Resource Group %q) does not exist", id.Name, id.ResourceGroup) + } + return fmt.Errorf("Bad: Get on Storage.StorageTargetsClient: %+v", err) + } + + return nil + } +} + +func testCheckAzureRMHPCCacheBlobTargetDestroy(s *terraform.State) error { + client := acceptance.AzureProvider.Meta().(*clients.Client).Storage.StorageTargetsClient + ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext + + for _, rs := range s.RootModule().Resources { + if rs.Type != "azurerm_hpc_cache_blob_target" { + continue + } + + id, err := parsers.HPCCacheTargetID(rs.Primary.ID) + if err != nil { + return err + } + + if resp, err := client.Get(ctx, id.ResourceGroup, id.Cache, id.Name); err != nil { + if !utils.ResponseWasNotFound(resp.Response) { + return fmt.Errorf("Bad: Get on Storage.StorageTargetsClient: %+v", err) + } + } + + return nil + } + + return nil +} + +func testAccAzureRMHPCCacheBlobTarget_basic(data acceptance.TestData) string { + template := testAccAzureRMHPCCacheBlobTarget_template(data) + return fmt.Sprintf(` +%s + +resource "azurerm_hpc_cache_blob_target" "test" { + name = "acctest-HPCCTGT-%s" + resource_group_name = azurerm_resource_group.test.name + cache_name 
= azurerm_hpc_cache.test.name + storage_container_id = azurerm_storage_container.test.id + namespace_path = "/blob_storage1" +} +`, template, data.RandomString) +} + +func testAccAzureRMHPCCacheBlobTarget_namespace(data acceptance.TestData) string { + template := testAccAzureRMHPCCacheBlobTarget_template(data) + return fmt.Sprintf(` +%s + +resource "azurerm_hpc_cache_blob_target" "test" { + name = "acctest-HPCCTGT-%s" + resource_group_name = azurerm_resource_group.test.name + cache_name = azurerm_hpc_cache.test.name + storage_container_id = azurerm_storage_container.test.id + namespace_path = "/blob_storage2" +} +`, template, data.RandomString) +} + +func testAccAzureRMHPCCacheBlobTarget_requiresImport(data acceptance.TestData) string { + template := testAccAzureRMHPCCacheBlobTarget_basic(data) + return fmt.Sprintf(` +%s + +resource "azurerm_hpc_cache_blob_target" "import" { + name = azurerm_hpc_cache_blob_target.test.name + resource_group_name = azurerm_hpc_cache_blob_target.test.resource_group_name + cache_name = azurerm_hpc_cache_blob_target.test.cache_name + storage_container_id = azurerm_hpc_cache_blob_target.test.storage_container_id + namespace_path = azurerm_hpc_cache_blob_target.test.namespace_path +} +`, template) +} + +func testAccAzureRMHPCCacheBlobTarget_template(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +data "azuread_service_principal" "test" { + display_name = "HPC Cache Resource Provider" +} + +resource "azurerm_storage_account" "test" { + name = "accteststorgacc%[2]s" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + account_tier = "Standard" + account_replication_type = "LRS" +} + +resource "azurerm_storage_container" "test" { + name = "acctest-strgctn-%[2]s" + storage_account_name = azurerm_storage_account.test.name +} + + +resource "azurerm_role_assignment" "test_storage_account_contrib" { + scope = azurerm_storage_account.test.id + role_definition_name = "Storage 
Account Contributor" + principal_id = data.azuread_service_principal.test.object_id +} + +resource "azurerm_role_assignment" "test_storage_blob_data_contrib" { + scope = azurerm_storage_account.test.id + role_definition_name = "Storage Blob Data Contributor" + principal_id = data.azuread_service_principal.test.object_id +} +`, testAccAzureRMHPCCache_basic(data), data.RandomString) +} diff --git a/azurerm/internal/services/storage/validate/hpc_cache_target.go b/azurerm/internal/services/storage/validate/hpc_cache_target.go new file mode 100644 index 0000000000000..3754885dcabc2 --- /dev/null +++ b/azurerm/internal/services/storage/validate/hpc_cache_target.go @@ -0,0 +1,36 @@ +package validate + +import ( + "errors" + "fmt" + "regexp" + "strings" +) + +func HPCCacheTargetName(i interface{}, k string) (warnings []string, errors []error) { + v, ok := i.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected type of %q to be string", k)) + return + } + exp := `^[-0-9a-zA-Z_]{1,31}$` + p := regexp.MustCompile(exp) + if !p.MatchString(v) { + errors = append(errors, fmt.Errorf(`cache target name doesn't comply with regexp: "%s"`, exp)) + } + + return warnings, errors +} + +func HPCCacheNamespacePath(i interface{}, k string) (warnings []string, errs []error) { + v, ok := i.(string) + if !ok { + errs = append(errs, fmt.Errorf("expected type of %q to be string", k)) + return + } + + if !strings.HasPrefix(v, "/") { + errs = append(errs, errors.New(`namespace path should start with "/"`)) + } + return warnings, errs +} diff --git a/website/azurerm.erb b/website/azurerm.erb index 9d9b4d5ec9123..3bfb298d0d144 100644 --- a/website/azurerm.erb +++ b/website/azurerm.erb @@ -2188,6 +2188,10 @@ azurerm_hpc_cache +
  • + azurerm_hpc_cache_blob_target +
  • +
  • azurerm_storage_account
  • diff --git a/website/docs/r/hpc_cache_blob_target.html.markdown b/website/docs/r/hpc_cache_blob_target.html.markdown new file mode 100644 index 0000000000000..2cc9385394016 --- /dev/null +++ b/website/docs/r/hpc_cache_blob_target.html.markdown @@ -0,0 +1,117 @@ +--- +subcategory: "Storage" +layout: "azurerm" +page_title: "Azure Resource Manager: azurerm_hpc_cache_blob_target" +description: |- + Manages a HPC Cache Blob Target. +--- + +# azurerm_hpc_cache_blob_target + +Manages a HPC Cache Blob Target. + +## Example Usage + +```hcl +resource "azurerm_resource_group" "example" { + name = "example-resources" + location = "West Europe" +} + +resource "azurerm_virtual_network" "example" { + name = "examplevn" + address_space = ["10.0.0.0/16"] + location = azurerm_resource_group.example.location + resource_group_name = azurerm_resource_group.example.name +} + +resource "azurerm_subnet" "example" { + name = "examplesubnet" + resource_group_name = azurerm_resource_group.example.name + virtual_network_name = azurerm_virtual_network.example.name + address_prefix = "10.0.1.0/24" +} + +resource "azurerm_hpc_cache" "example" { + name = "examplehpccache" + resource_group_name = azurerm_resource_group.example.name + location = azurerm_resource_group.example.location + cache_size_in_gb = 3072 + subnet_id = azurerm_subnet.example.id + sku_name = "Standard_2G" +} + +resource "azurerm_storage_account" "example" { + name = "examplestorgaccount" + resource_group_name = azurerm_resource_group.example.name + location = azurerm_resource_group.example.location + account_tier = "Standard" + account_replication_type = "LRS" +} + +resource "azurerm_storage_container" "example" { + name = "examplestoragecontainer" + storage_account_name = azurerm_storage_account.example.name +} + +data "azuread_service_principal" "example" { + display_name = "HPC Cache Resource Provider" +} + +resource "azurerm_role_assignment" "example_storage_account_contrib" { + scope = azurerm_storage_account.example.id + 
role_definition_name = "Storage Account Contributor" + principal_id = data.azuread_service_principal.example.object_id +} + +resource "azurerm_role_assignment" "example_storage_blob_data_contrib" { + scope = azurerm_storage_account.example.id + role_definition_name = "Storage Blob Data Contributor" + principal_id = data.azuread_service_principal.example.object_id +} + +resource "azurerm_hpc_cache_blob_target" "example" { + name = "examplehpccblobtarget" + resource_group_name = azurerm_resource_group.example.name + cache_name = azurerm_hpc_cache.example.name + storage_container_id = azurerm_storage_container.example.id + namespace_path = "/blob_storage" +} +``` + +## Argument Reference + +The following arguments are supported: + +* `name` - (Required) The name of the HPC Cache Blob Target. Changing this forces a new resource to be created. + +* `resource_group_name` - (Required) The name of the Resource Group in which to create the HPC Cache Blob Target. Changing this forces a new resource to be created. + +* `cache_name` - (Required) The name of the HPC Cache, to which the HPC Cache Blob Target will be added. Changing this forces a new resource to be created. + +* `storage_container_id` - (Required) The ID of the Storage Container used as the HPC Cache Blob Target. Changing this forces a new resource to be created. + +* `namespace_path` - (Required) The client-facing file path of the HPC Cache Blob Target. + +## Attributes Reference + +The following attributes are exported: + +* `id` - The `id` of the HPC Cache Blob Target. + +## Timeouts + +The `timeouts` block allows you to specify [timeouts](https://www.terraform.io/docs/configuration/resources.html#timeouts) for certain actions: + +* `create` - (Defaults to 30 minutes) Used when creating the HPC Cache Blob Target. +* `read` - (Defaults to 5 minutes) Used when retrieving the HPC Cache Blob Target. +* `update` - (Defaults to 30 minutes) Used when updating the HPC Cache Blob Target. 
+* `delete` - (Defaults to 30 minutes) Used when deleting the HPC Cache Blob Target. + +## Import + +HPC Cache Blob Targets can be imported using the `resource id`, e.g. + +```shell +terraform import azurerm_hpc_cache_blob_target.example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resourceGroupName/providers/Microsoft.StorageCache/caches/cacheName/storageTargets/targetName
```