From 79d02597a892f9ea01c832d5039e88cb8d3d2e05 Mon Sep 17 00:00:00 2001 From: Zhu Sheng Li Date: Thu, 26 Oct 2023 19:32:43 +0900 Subject: [PATCH 01/30] Fix build_batch_config cannot be removed problem fixes: #29261 --- internal/service/codebuild/project.go | 4 + internal/service/codebuild/project_test.go | 85 ++++++++++++++++++++++ 2 files changed, 89 insertions(+) diff --git a/internal/service/codebuild/project.go b/internal/service/codebuild/project.go index 4d055badb82..5aab1d698f3 100644 --- a/internal/service/codebuild/project.go +++ b/internal/service/codebuild/project.go @@ -1462,6 +1462,10 @@ func resourceProjectUpdate(ctx context.Context, d *schema.ResourceData, meta int if d.HasChange("build_batch_config") { input.BuildBatchConfig = expandBuildBatchConfig(d) + // If BuildBatchConfig is nil we should remove it by passing an empty struct. + if input.BuildBatchConfig == nil { + input.BuildBatchConfig = &codebuild.ProjectBuildBatchConfig{} + } } if d.HasChange("cache") { diff --git a/internal/service/codebuild/project_test.go b/internal/service/codebuild/project_test.go index 25147227952..6879a50c9fc 100644 --- a/internal/service/codebuild/project_test.go +++ b/internal/service/codebuild/project_test.go @@ -798,6 +798,45 @@ func TestAccCodeBuildProject_buildBatch(t *testing.T) { }) } +func TestAccCodeBuildProject_buildBatchConfigDelete(t *testing.T) { + ctx := acctest.Context(t) + var project codebuild.Project + rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) + resourceName := "aws_codebuild_project.test" + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, + ErrorCheck: acctest.ErrorCheck(t, codebuild.EndpointsID), + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, + CheckDestroy: testAccCheckProjectDestroy(ctx), + Steps: []resource.TestStep{ + { + Config: testAccProjectConfig_buildBatchConfigDelete(rName, true), + Check: resource.ComposeTestCheckFunc( + testAccCheckProjectExists(ctx, resourceName, &project), + resource.TestCheckResourceAttr(resourceName, "build_batch_config.0.combine_artifacts", "true"), + resource.TestCheckResourceAttr(resourceName, "build_batch_config.0.restrictions.#", "1"), + resource.TestCheckResourceAttr(resourceName, "build_batch_config.0.restrictions.0.compute_types_allowed.#", "0"), + resource.TestCheckResourceAttr(resourceName, "build_batch_config.0.restrictions.0.maximum_builds_allowed", "10"), + resource.TestCheckResourceAttr(resourceName, "build_batch_config.0.timeout_in_mins", "480"), + ), + }, + { + ResourceName: resourceName, + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccProjectConfig_buildBatchConfigDelete(rName, false), + Check: resource.ComposeTestCheckFunc( + testAccCheckProjectExists(ctx, resourceName, &project), + resource.TestCheckNoResourceAttr(resourceName, "build_batch_config.%"), + ), + }, + }, + }) +} + func TestAccCodeBuildProject_Source_gitCloneDepth(t *testing.T) { ctx := acctest.Context(t) var project codebuild.Project @@ -3421,6 +3460,52 @@ resource "aws_codebuild_project" "test" { `, rName, combineArtifacts, computeTypesAllowed, maximumBuildsAllowed, timeoutInMins)) } +func testAccProjectConfig_buildBatchConfigDelete(rName string, withBuildBatchConfig bool) string { + + template := ` +resource "aws_codebuild_project" "test" { + name = %[1]q + service_role = aws_iam_role.test.arn + + artifacts { + type = "NO_ARTIFACTS" + } + + environment { + compute_type = "BUILD_GENERAL1_SMALL" + image = "2" + type = 
"LINUX_CONTAINER" + } + + source { + location = "https://github.com/hashicorp/packer.git" + type = "GITHUB" + } + + %[2]s +} + ` + + buildBatchConfig := ` +build_batch_config { + combine_artifacts = true + + restrictions { + compute_types_allowed = [] + maximum_builds_allowed = 10 + } + + service_role = aws_iam_role.test.arn + timeout_in_mins = 480 +} + ` + + if withBuildBatchConfig { + return acctest.ConfigCompose(testAccProjectConfig_Base_ServiceRole(rName), fmt.Sprintf(template, rName, buildBatchConfig)) + } + return acctest.ConfigCompose(testAccProjectConfig_Base_ServiceRole(rName), fmt.Sprintf(template, rName, "")) +} + func testAccProjectConfig_s3Logs(rName, status, location string, encryptionDisabled bool) string { return acctest.ConfigCompose( testAccProjectConfig_Base_ServiceRole(rName), From 6a786786ef53c29d3d82f4a5e99890511ed3b2a0 Mon Sep 17 00:00:00 2001 From: Kit Ewbank Date: Mon, 22 Jan 2024 09:57:01 -0500 Subject: [PATCH 02/30] codebuild: Migrate to AWS SDK for Go v2. --- names/data/names_data.csv | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/names/data/names_data.csv b/names/data/names_data.csv index 5ff74a2101b..000a2675974 100644 --- a/names/data/names_data.csv +++ b/names/data/names_data.csv @@ -75,7 +75,7 @@ logs,logs,cloudwatchlogs,cloudwatchlogs,,logs,,cloudwatchlog;cloudwatchlogs,Logs rum,rum,cloudwatchrum,rum,,rum,,cloudwatchrum,RUM,CloudWatchRUM,,1,,,aws_rum_,,rum_,CloudWatch RUM,Amazon,,,,,,,RUM,,, synthetics,synthetics,synthetics,synthetics,,synthetics,,,Synthetics,Synthetics,,1,,,aws_synthetics_,,synthetics_,CloudWatch Synthetics,Amazon,,,,,,,synthetics,,, codeartifact,codeartifact,codeartifact,codeartifact,,codeartifact,,,CodeArtifact,CodeArtifact,,1,,,aws_codeartifact_,,codeartifact_,CodeArtifact,AWS,,,,,,,codeartifact,,, -codebuild,codebuild,codebuild,codebuild,,codebuild,,,CodeBuild,CodeBuild,,1,,,aws_codebuild_,,codebuild_,CodeBuild,AWS,,,,,,,CodeBuild,,, +codebuild,codebuild,codebuild,codebuild,,codebuild,,,CodeBuild,CodeBuild,,,2,,aws_codebuild_,,codebuild_,CodeBuild,AWS,,,,,,,CodeBuild,,, codecommit,codecommit,codecommit,codecommit,,codecommit,,,CodeCommit,CodeCommit,,,2,,aws_codecommit_,,codecommit_,CodeCommit,AWS,,,,,,,CodeCommit,,, deploy,deploy,codedeploy,codedeploy,,deploy,,codedeploy,Deploy,CodeDeploy,,,2,aws_codedeploy_,aws_deploy_,,codedeploy_,CodeDeploy,AWS,,,,,,,CodeDeploy,,, codeguruprofiler,codeguruprofiler,codeguruprofiler,codeguruprofiler,,codeguruprofiler,,,CodeGuruProfiler,CodeGuruProfiler,,,2,,aws_codeguruprofiler_,,codeguruprofiler_,CodeGuru Profiler,Amazon,,,,,,,CodeGuruProfiler,ListProfilingGroups,, From 027c51effa5867383a5e34b28c9089bd080357cc Mon Sep 17 00:00:00 2001 From: Kit Ewbank Date: Mon, 22 Jan 2024 09:58:52 -0500 Subject: [PATCH 03/30] Run 'make gen'. 
--- internal/conns/awsclient_gen.go | 6 +++--- internal/service/codebuild/generate.go | 2 +- .../service/codebuild/service_package_gen.go | 17 ++++++++++------- internal/service/codebuild/tags_gen.go | 19 ++++++++++--------- 4 files changed, 24 insertions(+), 20 deletions(-) diff --git a/internal/conns/awsclient_gen.go b/internal/conns/awsclient_gen.go index e7e2e68f486..7f454dafc1c 100644 --- a/internal/conns/awsclient_gen.go +++ b/internal/conns/awsclient_gen.go @@ -20,6 +20,7 @@ import ( cleanrooms_sdkv2 "github.com/aws/aws-sdk-go-v2/service/cleanrooms" cloudcontrol_sdkv2 "github.com/aws/aws-sdk-go-v2/service/cloudcontrol" cloudwatchlogs_sdkv2 "github.com/aws/aws-sdk-go-v2/service/cloudwatchlogs" + codebuild_sdkv2 "github.com/aws/aws-sdk-go-v2/service/codebuild" codecatalyst_sdkv2 "github.com/aws/aws-sdk-go-v2/service/codecatalyst" codecommit_sdkv2 "github.com/aws/aws-sdk-go-v2/service/codecommit" codedeploy_sdkv2 "github.com/aws/aws-sdk-go-v2/service/codedeploy" @@ -136,7 +137,6 @@ import ( cloudwatch_sdkv1 "github.com/aws/aws-sdk-go/service/cloudwatch" cloudwatchrum_sdkv1 "github.com/aws/aws-sdk-go/service/cloudwatchrum" codeartifact_sdkv1 "github.com/aws/aws-sdk-go/service/codeartifact" - codebuild_sdkv1 "github.com/aws/aws-sdk-go/service/codebuild" codegurureviewer_sdkv1 "github.com/aws/aws-sdk-go/service/codegurureviewer" cognitoidentity_sdkv1 "github.com/aws/aws-sdk-go/service/cognitoidentity" cognitoidentityprovider_sdkv1 "github.com/aws/aws-sdk-go/service/cognitoidentityprovider" @@ -407,8 +407,8 @@ func (c *AWSClient) CodeArtifactConn(ctx context.Context) *codeartifact_sdkv1.Co return errs.Must(conn[*codeartifact_sdkv1.CodeArtifact](ctx, c, names.CodeArtifact, make(map[string]any))) } -func (c *AWSClient) CodeBuildConn(ctx context.Context) *codebuild_sdkv1.CodeBuild { - return errs.Must(conn[*codebuild_sdkv1.CodeBuild](ctx, c, names.CodeBuild, make(map[string]any))) +func (c *AWSClient) CodeBuildClient(ctx context.Context) *codebuild_sdkv2.Client { + return errs.Must(client[*codebuild_sdkv2.Client](ctx, c, names.CodeBuild, make(map[string]any))) } func (c *AWSClient) CodeCatalystClient(ctx context.Context) *codecatalyst_sdkv2.Client { diff --git a/internal/service/codebuild/generate.go b/internal/service/codebuild/generate.go index 38b80319dc1..580cdeb6ebe 100644 --- a/internal/service/codebuild/generate.go +++ b/internal/service/codebuild/generate.go @@ -1,7 +1,7 @@ // Copyright (c) HashiCorp, Inc. // SPDX-License-Identifier: MPL-2.0 -//go:generate go run ../../generate/tags/main.go -ServiceTagsSlice +//go:generate go run ../../generate/tags/main.go -AWSSDKVersion=2 -ServiceTagsSlice //go:generate go run ../../generate/servicepackage/main.go // ONLY generate directives and package declaration! Do not add anything else to this file. 
diff --git a/internal/service/codebuild/service_package_gen.go b/internal/service/codebuild/service_package_gen.go index 7f2fef33260..743028f1c4a 100644 --- a/internal/service/codebuild/service_package_gen.go +++ b/internal/service/codebuild/service_package_gen.go @@ -5,9 +5,8 @@ package codebuild import ( "context" - aws_sdkv1 "github.com/aws/aws-sdk-go/aws" - session_sdkv1 "github.com/aws/aws-sdk-go/aws/session" - codebuild_sdkv1 "github.com/aws/aws-sdk-go/service/codebuild" + aws_sdkv2 "github.com/aws/aws-sdk-go-v2/aws" + codebuild_sdkv2 "github.com/aws/aws-sdk-go-v2/service/codebuild" "github.com/hashicorp/terraform-provider-aws/internal/conns" "github.com/hashicorp/terraform-provider-aws/internal/types" "github.com/hashicorp/terraform-provider-aws/names" @@ -60,11 +59,15 @@ func (p *servicePackage) ServicePackageName() string { return names.CodeBuild } -// NewConn returns a new AWS SDK for Go v1 client for this service package's AWS API. -func (p *servicePackage) NewConn(ctx context.Context, config map[string]any) (*codebuild_sdkv1.CodeBuild, error) { - sess := config["session"].(*session_sdkv1.Session) +// NewClient returns a new AWS SDK for Go v2 client for this service package's AWS API. +func (p *servicePackage) NewClient(ctx context.Context, config map[string]any) (*codebuild_sdkv2.Client, error) { + cfg := *(config["aws_sdkv2_config"].(*aws_sdkv2.Config)) - return codebuild_sdkv1.New(sess.Copy(&aws_sdkv1.Config{Endpoint: aws_sdkv1.String(config["endpoint"].(string))})), nil + return codebuild_sdkv2.NewFromConfig(cfg, func(o *codebuild_sdkv2.Options) { + if endpoint := config["endpoint"].(string); endpoint != "" { + o.BaseEndpoint = aws_sdkv2.String(endpoint) + } + }), nil } func ServicePackage(ctx context.Context) conns.ServicePackage { diff --git a/internal/service/codebuild/tags_gen.go b/internal/service/codebuild/tags_gen.go index 4047d797bca..5d4d6b0292e 100644 --- a/internal/service/codebuild/tags_gen.go +++ b/internal/service/codebuild/tags_gen.go @@ -4,8 +4,9 @@ package codebuild import ( "context" - "github.com/aws/aws-sdk-go/aws" - "github.com/aws/aws-sdk-go/service/codebuild" + "github.com/aws/aws-sdk-go-v2/aws" + "github.com/aws/aws-sdk-go-v2/service/codebuild" + awstypes "github.com/aws/aws-sdk-go-v2/service/codebuild/types" tftags "github.com/hashicorp/terraform-provider-aws/internal/tags" "github.com/hashicorp/terraform-provider-aws/internal/types/option" ) @@ -13,11 +14,11 @@ import ( // []*SERVICE.Tag handling // Tags returns codebuild service tags. -func Tags(tags tftags.KeyValueTags) []*codebuild.Tag { - result := make([]*codebuild.Tag, 0, len(tags)) +func Tags(tags tftags.KeyValueTags) []awstypes.Tag { + result := make([]awstypes.Tag, 0, len(tags)) for k, v := range tags.Map() { - tag := &codebuild.Tag{ + tag := awstypes.Tag{ Key: aws.String(k), Value: aws.String(v), } @@ -29,11 +30,11 @@ func Tags(tags tftags.KeyValueTags) []*codebuild.Tag { } // KeyValueTags creates tftags.KeyValueTags from codebuild service tags. -func KeyValueTags(ctx context.Context, tags []*codebuild.Tag) tftags.KeyValueTags { +func KeyValueTags(ctx context.Context, tags []awstypes.Tag) tftags.KeyValueTags { m := make(map[string]*string, len(tags)) for _, tag := range tags { - m[aws.StringValue(tag.Key)] = tag.Value + m[aws.ToString(tag.Key)] = tag.Value } return tftags.New(ctx, m) @@ -41,7 +42,7 @@ func KeyValueTags(ctx context.Context, tags []*codebuild.Tag) tftags.KeyValueTag // getTagsIn returns codebuild service tags from Context. 
// nil is returned if there are no input tags. -func getTagsIn(ctx context.Context) []*codebuild.Tag { +func getTagsIn(ctx context.Context) []awstypes.Tag { if inContext, ok := tftags.FromContext(ctx); ok { if tags := Tags(inContext.TagsIn.UnwrapOrDefault()); len(tags) > 0 { return tags @@ -52,7 +53,7 @@ func getTagsIn(ctx context.Context) []*codebuild.Tag { } // setTagsOut sets codebuild service tags in Context. -func setTagsOut(ctx context.Context, tags []*codebuild.Tag) { +func setTagsOut(ctx context.Context, tags []awstypes.Tag) { if inContext, ok := tftags.FromContext(ctx); ok { inContext.TagsOut = option.Some(KeyValueTags(ctx, tags)) } From fcc5143113db411dd3c43fb680b96a0cca10ba5f Mon Sep 17 00:00:00 2001 From: Kit Ewbank Date: Mon, 22 Jan 2024 10:01:11 -0500 Subject: [PATCH 04/30] Run 'go get github.com/aws/aws-sdk-go-v2/service/codebuild@v1.28.0 && go mod tidy'. --- go.mod | 1 + go.sum | 2 ++ 2 files changed, 3 insertions(+) diff --git a/go.mod b/go.mod index 56f8dcda82b..ab85454343c 100644 --- a/go.mod +++ b/go.mod @@ -26,6 +26,7 @@ require ( github.com/aws/aws-sdk-go-v2/service/cleanrooms v1.8.6 github.com/aws/aws-sdk-go-v2/service/cloudcontrol v1.15.7 github.com/aws/aws-sdk-go-v2/service/cloudwatchlogs v1.31.0 + github.com/aws/aws-sdk-go-v2/service/codebuild v1.28.0 github.com/aws/aws-sdk-go-v2/service/codecatalyst v1.10.6 github.com/aws/aws-sdk-go-v2/service/codecommit v1.20.1 github.com/aws/aws-sdk-go-v2/service/codedeploy v1.22.3 diff --git a/go.sum b/go.sum index 027e3bbd233..b06022d3ca0 100644 --- a/go.sum +++ b/go.sum @@ -75,6 +75,8 @@ github.com/aws/aws-sdk-go-v2/service/cloudcontrol v1.15.7 h1:8sBfx7QkDZ6dgfUNXWH github.com/aws/aws-sdk-go-v2/service/cloudcontrol v1.15.7/go.mod h1:P1EMD13hrBE2KUw030w482Eyk2NmOFIvGqmgNi4XRDc= github.com/aws/aws-sdk-go-v2/service/cloudwatchlogs v1.31.0 h1:Rk+Ft0Mu/eiNt2iJ2oS8Gf1h5m6q5crwS8cmlTylnvM= github.com/aws/aws-sdk-go-v2/service/cloudwatchlogs v1.31.0/go.mod h1:jZNaJEtn9TLi3pfxycLz79HVkKxP8ZdYm92iaNFgBsA= +github.com/aws/aws-sdk-go-v2/service/codebuild v1.28.0 h1:ueiaIwSz96QKCX+xfb4dWC681xycSLybAFOz3X11U7s= +github.com/aws/aws-sdk-go-v2/service/codebuild v1.28.0/go.mod h1:kHgLQLdLEqmcxQxDuGjz1naXvaUvBMMKolAJECBwYtw= github.com/aws/aws-sdk-go-v2/service/codecatalyst v1.10.6 h1:WLVD5wFI3yC1u/8L9bNeZ9+VURSdKjGA1Q+n+F1355Y= github.com/aws/aws-sdk-go-v2/service/codecatalyst v1.10.6/go.mod h1:/lHwoB/rkF3eWMJPvm9wXN7y1THwqCLCOrF7xzA2u9E= github.com/aws/aws-sdk-go-v2/service/codecommit v1.20.1 h1:FJx0hebVW2EfD5eJjRSkQdWsqFTvZSCSitnMHcOxxko= From 0cbbc6150b0b4b227d806bb0082c0aa090a51a6a Mon Sep 17 00:00:00 2001 From: Kit Ewbank Date: Mon, 22 Jan 2024 10:03:10 -0500 Subject: [PATCH 05/30] Correct tagging code generation. --- internal/service/codebuild/generate.go | 2 +- internal/service/codebuild/tags_gen.go | 1 - 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/internal/service/codebuild/generate.go b/internal/service/codebuild/generate.go index 580cdeb6ebe..39325eefabf 100644 --- a/internal/service/codebuild/generate.go +++ b/internal/service/codebuild/generate.go @@ -1,7 +1,7 @@ // Copyright (c) HashiCorp, Inc. // SPDX-License-Identifier: MPL-2.0 -//go:generate go run ../../generate/tags/main.go -AWSSDKVersion=2 -ServiceTagsSlice +//go:generate go run ../../generate/tags/main.go -AWSSDKVersion=2 -ServiceTagsSlice -SkipAWSServiceImp //go:generate go run ../../generate/servicepackage/main.go // ONLY generate directives and package declaration! 
Do not add anything else to this file. diff --git a/internal/service/codebuild/tags_gen.go b/internal/service/codebuild/tags_gen.go index 5d4d6b0292e..1cfbf0592ce 100644 --- a/internal/service/codebuild/tags_gen.go +++ b/internal/service/codebuild/tags_gen.go @@ -5,7 +5,6 @@ import ( "context" "github.com/aws/aws-sdk-go-v2/aws" - "github.com/aws/aws-sdk-go-v2/service/codebuild" awstypes "github.com/aws/aws-sdk-go-v2/service/codebuild/types" tftags "github.com/hashicorp/terraform-provider-aws/internal/tags" "github.com/hashicorp/terraform-provider-aws/internal/types/option" From 763ef3989ab3dc2faec58da637b2dd0ad9c183aa Mon Sep 17 00:00:00 2001 From: Kit Ewbank Date: Mon, 22 Jan 2024 10:11:14 -0500 Subject: [PATCH 06/30] r/aws_codebuild_resource_policy: Migrate to AWS SDK for Go v2. --- internal/service/codebuild/exports_test.go | 11 +++ internal/service/codebuild/find.go | 22 ----- internal/service/codebuild/resource_policy.go | 81 +++++++++++++------ .../service/codebuild/service_package_gen.go | 3 +- 4 files changed, 68 insertions(+), 49 deletions(-) create mode 100644 internal/service/codebuild/exports_test.go diff --git a/internal/service/codebuild/exports_test.go b/internal/service/codebuild/exports_test.go new file mode 100644 index 00000000000..0a351b8e0d0 --- /dev/null +++ b/internal/service/codebuild/exports_test.go @@ -0,0 +1,11 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +package codebuild + +// Exports for use in tests only. +var ( + ResourceResourcePolicy = resourceResourcePolicy + + FindResourcePolicyByARN = findResourcePolicyByARN +) diff --git a/internal/service/codebuild/find.go b/internal/service/codebuild/find.go index 0dae36d7569..9e1c85f5117 100644 --- a/internal/service/codebuild/find.go +++ b/internal/service/codebuild/find.go @@ -8,7 +8,6 @@ import ( "github.com/aws/aws-sdk-go/aws" "github.com/aws/aws-sdk-go/service/codebuild" - "github.com/hashicorp/aws-sdk-go-base/v2/awsv1shim/v2/tfawserr" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/retry" "github.com/hashicorp/terraform-provider-aws/internal/tfresource" ) @@ -59,27 +58,6 @@ func FindProjectByARN(ctx context.Context, conn *codebuild.CodeBuild, arn string return output.Projects[0], nil } -func FindResourcePolicyByARN(ctx context.Context, conn *codebuild.CodeBuild, arn string) (*codebuild.GetResourcePolicyOutput, error) { - input := &codebuild.GetResourcePolicyInput{ - ResourceArn: aws.String(arn), - } - - output, err := conn.GetResourcePolicyWithContext(ctx, input) - if tfawserr.ErrMessageContains(err, codebuild.ErrCodeResourceNotFoundException, "Resource ARN does not exist") || - tfawserr.ErrMessageContains(err, codebuild.ErrCodeResourceNotFoundException, "Resource ARN resource policy does not exist") { - return nil, &retry.NotFoundError{ - LastError: err, - LastRequest: input, - } - } - - if err != nil { - return nil, err - } - - return output, nil -} - func FindSourceCredentialByARN(ctx context.Context, conn *codebuild.CodeBuild, arn string) (*codebuild.SourceCredentialsInfo, error) { var result *codebuild.SourceCredentialsInfo input := &codebuild.ListSourceCredentialsInput{} diff --git a/internal/service/codebuild/resource_policy.go b/internal/service/codebuild/resource_policy.go index 8844f2a5a61..9c6d021ab11 100644 --- a/internal/service/codebuild/resource_policy.go +++ b/internal/service/codebuild/resource_policy.go @@ -7,21 +7,23 @@ import ( "context" "log" - "github.com/aws/aws-sdk-go/aws" - 
"github.com/aws/aws-sdk-go/service/codebuild" - "github.com/hashicorp/aws-sdk-go-base/v2/awsv1shim/v2/tfawserr" + "github.com/aws/aws-sdk-go-v2/aws" + "github.com/aws/aws-sdk-go-v2/service/codebuild" + "github.com/aws/aws-sdk-go-v2/service/codebuild/types" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/retry" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/structure" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation" "github.com/hashicorp/terraform-provider-aws/internal/conns" + "github.com/hashicorp/terraform-provider-aws/internal/errs" "github.com/hashicorp/terraform-provider-aws/internal/errs/sdkdiag" "github.com/hashicorp/terraform-provider-aws/internal/tfresource" "github.com/hashicorp/terraform-provider-aws/internal/verify" ) -// @SDKResource("aws_codebuild_resource_policy") -func ResourceResourcePolicy() *schema.Resource { +// @SDKResource("aws_codebuild_resource_policy", name="Resource Policy") +func resourceResourcePolicy() *schema.Resource { return &schema.Resource{ CreateWithoutTimeout: resourceResourcePolicyPut, ReadWithoutTimeout: resourceResourcePolicyRead, @@ -56,12 +58,11 @@ func ResourceResourcePolicy() *schema.Resource { func resourceResourcePolicyPut(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { var diags diag.Diagnostics - conn := meta.(*conns.AWSClient).CodeBuildConn(ctx) + conn := meta.(*conns.AWSClient).CodeBuildClient(ctx) policy, err := structure.NormalizeJsonString(d.Get("policy").(string)) - if err != nil { - return sdkdiag.AppendErrorf(diags, "policy (%s) is invalid JSON: %s", d.Get("policy").(string), err) + return sdkdiag.AppendFromErr(diags, err) } input := &codebuild.PutResourcePolicyInput{ @@ -69,21 +70,24 @@ func resourceResourcePolicyPut(ctx context.Context, d *schema.ResourceData, meta ResourceArn: aws.String(d.Get("resource_arn").(string)), } - resp, err := conn.PutResourcePolicyWithContext(ctx, input) + output, err := conn.PutResourcePolicy(ctx, input) + if err != nil { - return sdkdiag.AppendErrorf(diags, "creating CodeBuild Resource Policy: %s", err) + return sdkdiag.AppendErrorf(diags, "putting CodeBuild Resource Policy: %s", err) } - d.SetId(aws.StringValue(resp.ResourceArn)) + if d.IsNewResource() { + d.SetId(aws.ToString(output.ResourceArn)) + } return append(diags, resourceResourcePolicyRead(ctx, d, meta)...) 
} func resourceResourcePolicyRead(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { var diags diag.Diagnostics - conn := meta.(*conns.AWSClient).CodeBuildConn(ctx) + conn := meta.(*conns.AWSClient).CodeBuildClient(ctx) - output, err := FindResourcePolicyByARN(ctx, conn, d.Id()) + output, err := findResourcePolicyByARN(ctx, conn, d.Id()) if !d.IsNewResource() && tfresource.NotFound(err) { log.Printf("[WARN] CodeBuild Resource Policy (%s) not found, removing from state", d.Id()) @@ -92,41 +96,66 @@ func resourceResourcePolicyRead(ctx context.Context, d *schema.ResourceData, met } if err != nil { - return sdkdiag.AppendErrorf(diags, "Listing CodeBuild Resource Policies: %s", err) + return sdkdiag.AppendErrorf(diags, "reading CodeBuild Resource Policy (%s): %s", d.Id(), err) } - policyToSet, err := verify.SecondJSONUnlessEquivalent(d.Get("policy").(string), aws.StringValue(output.Policy)) - + policyToSet, err := verify.SecondJSONUnlessEquivalent(d.Get("policy").(string), aws.ToString(output.Policy)) if err != nil { - return sdkdiag.AppendErrorf(diags, "while setting policy (%s), encountered: %s", policyToSet, err) + return sdkdiag.AppendFromErr(diags, err) } policyToSet, err = structure.NormalizeJsonString(policyToSet) - if err != nil { - return sdkdiag.AppendErrorf(diags, "policy (%s) is an invalid JSON: %s", policyToSet, err) + return sdkdiag.AppendFromErr(diags, err) } - d.Set("resource_arn", d.Id()) d.Set("policy", policyToSet) + d.Set("resource_arn", d.Id()) return diags } func resourceResourcePolicyDelete(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { var diags diag.Diagnostics - conn := meta.(*conns.AWSClient).CodeBuildConn(ctx) + conn := meta.(*conns.AWSClient).CodeBuildClient(ctx) - deleteOpts := &codebuild.DeleteResourcePolicyInput{ + log.Printf("[INFO] Deleting CodeBuild Resource Policy: %s", d.Id()) + _, err := conn.DeleteResourcePolicy(ctx, &codebuild.DeleteResourcePolicyInput{ ResourceArn: aws.String(d.Id()), + }) + + if errs.IsA[*types.ResourceNotFoundException](err) { + return diags } - if _, err := conn.DeleteResourcePolicyWithContext(ctx, deleteOpts); err != nil { - if tfawserr.ErrMessageContains(err, codebuild.ErrCodeResourceNotFoundException, "Resource ARN does not exist") { - return diags - } + if err != nil { return sdkdiag.AppendErrorf(diags, "deleting CodeBuild Resource Policy (%s): %s", d.Id(), err) } return diags } + +func findResourcePolicyByARN(ctx context.Context, conn *codebuild.Client, arn string) (*codebuild.GetResourcePolicyOutput, error) { + input := &codebuild.GetResourcePolicyInput{ + ResourceArn: aws.String(arn), + } + + output, err := conn.GetResourcePolicy(ctx, input) + + if errs.IsA[*types.ResourceNotFoundException](err) { + return nil, &retry.NotFoundError{ + LastError: err, + LastRequest: input, + } + } + + if err != nil { + return nil, err + } + + if output == nil || output.Policy == nil { + return nil, tfresource.NewEmptyResultError(input) + } + + return output, nil +} diff --git a/internal/service/codebuild/service_package_gen.go b/internal/service/codebuild/service_package_gen.go index 743028f1c4a..5d447db1de6 100644 --- a/internal/service/codebuild/service_package_gen.go +++ b/internal/service/codebuild/service_package_gen.go @@ -41,8 +41,9 @@ func (p *servicePackage) SDKResources(ctx context.Context) []*types.ServicePacka Tags: &types.ServicePackageResourceTags{}, }, { - Factory: ResourceResourcePolicy, + Factory: resourceResourcePolicy, TypeName: "aws_codebuild_resource_policy", 
+ Name: "Resource Policy", }, { Factory: ResourceSourceCredential, From ef77a32967b776b37268fa38787adb0fefe7b782 Mon Sep 17 00:00:00 2001 From: Kit Ewbank Date: Mon, 22 Jan 2024 11:52:30 -0500 Subject: [PATCH 07/30] r/aws_codebuild_source_credential: Migrate to AWS SDK for Go v2. --- internal/service/codebuild/exports_test.go | 6 +- internal/service/codebuild/find.go | 34 ------ .../service/codebuild/service_package_gen.go | 3 +- .../service/codebuild/source_credential.go | 113 ++++++++++++------ 4 files changed, 82 insertions(+), 74 deletions(-) diff --git a/internal/service/codebuild/exports_test.go b/internal/service/codebuild/exports_test.go index 0a351b8e0d0..1f947192da0 100644 --- a/internal/service/codebuild/exports_test.go +++ b/internal/service/codebuild/exports_test.go @@ -5,7 +5,9 @@ package codebuild // Exports for use in tests only. var ( - ResourceResourcePolicy = resourceResourcePolicy + ResourceResourcePolicy = resourceResourcePolicy + ResourceSourceCredential = resourceSourceCredential - FindResourcePolicyByARN = findResourcePolicyByARN + FindResourcePolicyByARN = findResourcePolicyByARN + FindSourceCredentialsByARN = findSourceCredentialsByARN ) diff --git a/internal/service/codebuild/find.go b/internal/service/codebuild/find.go index 9e1c85f5117..287d15608b5 100644 --- a/internal/service/codebuild/find.go +++ b/internal/service/codebuild/find.go @@ -8,7 +8,6 @@ import ( "github.com/aws/aws-sdk-go/aws" "github.com/aws/aws-sdk-go/service/codebuild" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/retry" "github.com/hashicorp/terraform-provider-aws/internal/tfresource" ) @@ -57,36 +56,3 @@ func FindProjectByARN(ctx context.Context, conn *codebuild.CodeBuild, arn string return output.Projects[0], nil } - -func FindSourceCredentialByARN(ctx context.Context, conn *codebuild.CodeBuild, arn string) (*codebuild.SourceCredentialsInfo, error) { - var result *codebuild.SourceCredentialsInfo - input := &codebuild.ListSourceCredentialsInput{} - output, err := conn.ListSourceCredentialsWithContext(ctx, input) - if err != nil { - return nil, err - } - - if output == nil { - return nil, tfresource.NewEmptyResultError(input) - } - - for _, sourceCred := range output.SourceCredentialsInfos { - if sourceCred == nil { - continue - } - - if aws.StringValue(sourceCred.Arn) == arn { - result = sourceCred - break - } - } - - if result == nil { - return nil, &retry.NotFoundError{ - LastError: err, - LastRequest: input, - } - } - - return result, nil -} diff --git a/internal/service/codebuild/service_package_gen.go b/internal/service/codebuild/service_package_gen.go index 5d447db1de6..fd807d2f143 100644 --- a/internal/service/codebuild/service_package_gen.go +++ b/internal/service/codebuild/service_package_gen.go @@ -46,8 +46,9 @@ func (p *servicePackage) SDKResources(ctx context.Context) []*types.ServicePacka Name: "Resource Policy", }, { - Factory: ResourceSourceCredential, + Factory: resourceSourceCredential, TypeName: "aws_codebuild_source_credential", + Name: "Source Credential", }, { Factory: ResourceWebhook, diff --git a/internal/service/codebuild/source_credential.go b/internal/service/codebuild/source_credential.go index 7d29ab77638..15d057ea84f 100644 --- a/internal/service/codebuild/source_credential.go +++ b/internal/service/codebuild/source_credential.go @@ -7,19 +7,21 @@ import ( "context" "log" - "github.com/aws/aws-sdk-go/aws" - "github.com/aws/aws-sdk-go/service/codebuild" - "github.com/hashicorp/aws-sdk-go-base/v2/awsv1shim/v2/tfawserr" + 
"github.com/aws/aws-sdk-go-v2/aws" + "github.com/aws/aws-sdk-go-v2/service/codebuild" + "github.com/aws/aws-sdk-go-v2/service/codebuild/types" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation" "github.com/hashicorp/terraform-provider-aws/internal/conns" + "github.com/hashicorp/terraform-provider-aws/internal/enum" + "github.com/hashicorp/terraform-provider-aws/internal/errs" "github.com/hashicorp/terraform-provider-aws/internal/errs/sdkdiag" + tfslices "github.com/hashicorp/terraform-provider-aws/internal/slices" "github.com/hashicorp/terraform-provider-aws/internal/tfresource" ) -// @SDKResource("aws_codebuild_source_credential") -func ResourceSourceCredential() *schema.Resource { +// @SDKResource("aws_codebuild_source_credential", name="Source Credential") +func resourceSourceCredential() *schema.Resource { return &schema.Resource{ CreateWithoutTimeout: resourceSourceCredentialCreate, ReadWithoutTimeout: resourceSourceCredentialRead, @@ -35,16 +37,16 @@ func ResourceSourceCredential() *schema.Resource { Computed: true, }, "auth_type": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validation.StringInSlice(codebuild.AuthType_Values(), false), + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateDiagFunc: enum.Validate[types.AuthType](), }, "server_type": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validation.StringInSlice(codebuild.ServerType_Values(), false), + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateDiagFunc: enum.Validate[types.ServerType](), }, "token": { Type: schema.TypeString, @@ -63,35 +65,36 @@ func ResourceSourceCredential() *schema.Resource { func resourceSourceCredentialCreate(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { var diags diag.Diagnostics - conn := meta.(*conns.AWSClient).CodeBuildConn(ctx) + conn := meta.(*conns.AWSClient).CodeBuildClient(ctx) - authType := d.Get("auth_type").(string) - - createOpts := &codebuild.ImportSourceCredentialsInput{ - AuthType: aws.String(authType), - ServerType: aws.String(d.Get("server_type").(string)), + authType := types.AuthType(d.Get("auth_type").(string)) + input := &codebuild.ImportSourceCredentialsInput{ + AuthType: authType, + ServerType: types.ServerType(d.Get("server_type").(string)), Token: aws.String(d.Get("token").(string)), } - if attr, ok := d.GetOk("user_name"); ok && authType == codebuild.AuthTypeBasicAuth { - createOpts.Username = aws.String(attr.(string)) + if attr, ok := d.GetOk("user_name"); ok && authType == types.AuthTypeBasicAuth { + input.Username = aws.String(attr.(string)) } - resp, err := conn.ImportSourceCredentialsWithContext(ctx, createOpts) + output, err := conn.ImportSourceCredentials(ctx, input) + if err != nil { - return sdkdiag.AppendErrorf(diags, "importing source credentials: %s", err) + return sdkdiag.AppendErrorf(diags, "creating CodeBuild Source Credential: %s", err) } - d.SetId(aws.StringValue(resp.Arn)) + d.SetId(aws.ToString(output.Arn)) return append(diags, resourceSourceCredentialRead(ctx, d, meta)...) 
} func resourceSourceCredentialRead(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { var diags diag.Diagnostics - conn := meta.(*conns.AWSClient).CodeBuildConn(ctx) + conn := meta.(*conns.AWSClient).CodeBuildClient(ctx) + + credentials, err := findSourceCredentialsByARN(ctx, conn, d.Id()) - resp, err := FindSourceCredentialByARN(ctx, conn, d.Id()) if !d.IsNewResource() && tfresource.NotFound(err) { log.Printf("[WARN] CodeBuild Source Credential (%s) not found, removing from state", d.Id()) d.SetId("") @@ -102,27 +105,63 @@ func resourceSourceCredentialRead(ctx context.Context, d *schema.ResourceData, m return sdkdiag.AppendErrorf(diags, "reading CodeBuild Source Credential (%s): %s", d.Id(), err) } - d.Set("arn", resp.Arn) - d.Set("auth_type", resp.AuthType) - d.Set("server_type", resp.ServerType) + d.Set("arn", credentials.Arn) + d.Set("auth_type", credentials.AuthType) + d.Set("server_type", credentials.ServerType) return diags } func resourceSourceCredentialDelete(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { var diags diag.Diagnostics - conn := meta.(*conns.AWSClient).CodeBuildConn(ctx) + conn := meta.(*conns.AWSClient).CodeBuildClient(ctx) - deleteOpts := &codebuild.DeleteSourceCredentialsInput{ + log.Printf("[INFO] Deleting CodeBuild Source Credential: %s", d.Id()) + _, err := conn.DeleteSourceCredentials(ctx, &codebuild.DeleteSourceCredentialsInput{ Arn: aws.String(d.Id()), + }) + + if errs.IsA[*types.ResourceNotFoundException](err) { + return diags } - if _, err := conn.DeleteSourceCredentialsWithContext(ctx, deleteOpts); err != nil { - if tfawserr.ErrCodeEquals(err, codebuild.ErrCodeResourceNotFoundException) { - return diags - } - return sdkdiag.AppendErrorf(diags, "deleting CodeBuild Source Credentials(%s): %s", d.Id(), err) + if err != nil { + return sdkdiag.AppendErrorf(diags, "deleting CodeBuild Source Credential (%s): %s", d.Id(), err) } return diags } + +func findSourceCredentialsByARN(ctx context.Context, conn *codebuild.Client, arn string) (*types.SourceCredentialsInfo, error) { + input := &codebuild.ListSourceCredentialsInput{} + output, err := findSourceCredentials(ctx, conn, input, func(v *types.SourceCredentialsInfo) bool { + return aws.ToString(v.Arn) == arn + }) + + if err != nil { + return nil, err + } + + return tfresource.AssertSingleValueResult(output) +} + +func findSourceCredentials(ctx context.Context, conn *codebuild.Client, input *codebuild.ListSourceCredentialsInput, filter tfslices.Predicate[*types.SourceCredentialsInfo]) ([]types.SourceCredentialsInfo, error) { + var sourceCredentials []types.SourceCredentialsInfo + output, err := conn.ListSourceCredentials(ctx, input) + + if err != nil { + return nil, err + } + + if output == nil { + return nil, tfresource.NewEmptyResultError(input) + } + + for _, v := range output.SourceCredentialsInfos { + if filter(&v) { + sourceCredentials = append(sourceCredentials, v) + } + } + + return sourceCredentials, nil +} From 1afd700336a8a337edbf0d0cf49adcb022f2cd06 Mon Sep 17 00:00:00 2001 From: Kit Ewbank Date: Mon, 22 Jan 2024 12:17:52 -0500 Subject: [PATCH 08/30] r/aws_codebuild_report_group: Migrate to AWS SDK for Go v2. 
--- internal/service/codebuild/consts.go | 3 +- internal/service/codebuild/exports_test.go | 2 + internal/service/codebuild/find.go | 25 -- internal/service/codebuild/report_group.go | 294 +++++++++++------- .../service/codebuild/service_package_gen.go | 2 +- internal/service/codebuild/status.go | 33 -- internal/service/codebuild/wait.go | 35 --- 7 files changed, 187 insertions(+), 207 deletions(-) delete mode 100644 internal/service/codebuild/status.go delete mode 100644 internal/service/codebuild/wait.go diff --git a/internal/service/codebuild/consts.go b/internal/service/codebuild/consts.go index 00a7ff7167f..f5fafb3d3ee 100644 --- a/internal/service/codebuild/consts.go +++ b/internal/service/codebuild/consts.go @@ -8,8 +8,7 @@ import ( ) const ( - ResNameReportGroup = "Report Group" - ResNameWebhook = "Webhook" + ResNameWebhook = "Webhook" ) const ( diff --git a/internal/service/codebuild/exports_test.go b/internal/service/codebuild/exports_test.go index 1f947192da0..f0c206ecdb4 100644 --- a/internal/service/codebuild/exports_test.go +++ b/internal/service/codebuild/exports_test.go @@ -5,9 +5,11 @@ package codebuild // Exports for use in tests only. var ( + ResourceReportGroup = resourceReportGroup ResourceResourcePolicy = resourceResourcePolicy ResourceSourceCredential = resourceSourceCredential + FindReportGroupByARN = findReportGroupByARN FindResourcePolicyByARN = findResourcePolicyByARN FindSourceCredentialsByARN = findSourceCredentialsByARN ) diff --git a/internal/service/codebuild/find.go b/internal/service/codebuild/find.go index 287d15608b5..8a11d2a1e14 100644 --- a/internal/service/codebuild/find.go +++ b/internal/service/codebuild/find.go @@ -11,31 +11,6 @@ import ( "github.com/hashicorp/terraform-provider-aws/internal/tfresource" ) -// FindReportGroupByARN returns the Report Group corresponding to the specified Arn. 
-func FindReportGroupByARN(ctx context.Context, conn *codebuild.CodeBuild, arn string) (*codebuild.ReportGroup, error) { - output, err := conn.BatchGetReportGroupsWithContext(ctx, &codebuild.BatchGetReportGroupsInput{ - ReportGroupArns: aws.StringSlice([]string{arn}), - }) - if err != nil { - return nil, err - } - - if output == nil { - return nil, nil - } - - if len(output.ReportGroups) == 0 { - return nil, nil - } - - reportGroup := output.ReportGroups[0] - if reportGroup == nil { - return nil, nil - } - - return reportGroup, nil -} - func FindProjectByARN(ctx context.Context, conn *codebuild.CodeBuild, arn string) (*codebuild.Project, error) { input := &codebuild.BatchGetProjectsInput{ Names: []*string{aws.String(arn)}, diff --git a/internal/service/codebuild/report_group.go b/internal/service/codebuild/report_group.go index 442e4780e60..63b6792390f 100644 --- a/internal/service/codebuild/report_group.go +++ b/internal/service/codebuild/report_group.go @@ -5,26 +5,29 @@ package codebuild import ( "context" - "errors" + "log" "time" - "github.com/aws/aws-sdk-go/aws" - "github.com/aws/aws-sdk-go/service/codebuild" - "github.com/hashicorp/aws-sdk-go-base/v2/awsv1shim/v2/tfawserr" + "github.com/aws/aws-sdk-go-v2/aws" + "github.com/aws/aws-sdk-go-v2/service/codebuild" + "github.com/aws/aws-sdk-go-v2/service/codebuild/types" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/retry" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation" "github.com/hashicorp/terraform-provider-aws/internal/conns" - "github.com/hashicorp/terraform-provider-aws/internal/create" + "github.com/hashicorp/terraform-provider-aws/internal/enum" "github.com/hashicorp/terraform-provider-aws/internal/errs/sdkdiag" + tfslices "github.com/hashicorp/terraform-provider-aws/internal/slices" tftags "github.com/hashicorp/terraform-provider-aws/internal/tags" + "github.com/hashicorp/terraform-provider-aws/internal/tfresource" "github.com/hashicorp/terraform-provider-aws/internal/verify" "github.com/hashicorp/terraform-provider-aws/names" ) // @SDKResource("aws_codebuild_report_group", name="Report Group") // @Tags -func ResourceReportGroup() *schema.Resource { +func resourceReportGroup() *schema.Resource { return &schema.Resource{ CreateWithoutTimeout: resourceReportGroupCreate, ReadWithoutTimeout: resourceReportGroupRead, @@ -40,17 +43,14 @@ func ResourceReportGroup() *schema.Resource { Type: schema.TypeString, Computed: true, }, - "name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validation.StringLenBetween(2, 128), + "created": { + Type: schema.TypeString, + Computed: true, }, - "type": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validation.StringInSlice(codebuild.ReportType_Values(), false), + "delete_reports": { + Type: schema.TypeBool, + Optional: true, + Default: false, }, "export_config": { Type: schema.TypeList, @@ -58,11 +58,6 @@ func ResourceReportGroup() *schema.Resource { MaxItems: 1, Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ - "type": { - Type: schema.TypeString, - Required: true, - ValidateFunc: validation.StringInSlice(codebuild.ReportExportConfigType_Values(), false), - }, "s3_destination": { Type: schema.TypeList, Optional: true, @@ -83,10 +78,10 @@ func ResourceReportGroup() *schema.Resource { ValidateFunc: verify.ValidARN, }, 
"packaging": { - Type: schema.TypeString, - Optional: true, - Default: codebuild.ReportPackagingTypeNone, - ValidateFunc: validation.StringInSlice(codebuild.ReportPackagingType_Values(), false), + Type: schema.TypeString, + Optional: true, + Default: types.ReportPackagingTypeNone, + ValidateDiagFunc: enum.Validate[types.ReportPackagingType](), }, "path": { Type: schema.TypeString, @@ -95,20 +90,28 @@ func ResourceReportGroup() *schema.Resource { }, }, }, + "type": { + Type: schema.TypeString, + Required: true, + ValidateDiagFunc: enum.Validate[types.ReportExportConfigType](), + }, }, }, }, - "created": { - Type: schema.TypeString, - Computed: true, - }, - "delete_reports": { - Type: schema.TypeBool, - Optional: true, - Default: false, + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.StringLenBetween(2, 128), }, names.AttrTags: tftags.TagsSchema(), names.AttrTagsAll: tftags.TagsSchemaComputed(), + "type": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateDiagFunc: enum.Validate[types.ReportType](), + }, }, CustomizeDiff: verify.SetTagsDiff, @@ -117,61 +120,50 @@ func ResourceReportGroup() *schema.Resource { func resourceReportGroupCreate(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { var diags diag.Diagnostics - conn := meta.(*conns.AWSClient).CodeBuildConn(ctx) + conn := meta.(*conns.AWSClient).CodeBuildClient(ctx) + name := d.Get("name").(string) input := &codebuild.CreateReportGroupInput{ - Name: aws.String(d.Get("name").(string)), - Type: aws.String(d.Get("type").(string)), ExportConfig: expandReportGroupExportConfig(d.Get("export_config").([]interface{})), + Name: aws.String(name), Tags: getTagsIn(ctx), + Type: types.ReportType(d.Get("type").(string)), } - resp, err := conn.CreateReportGroupWithContext(ctx, input) + output, err := conn.CreateReportGroup(ctx, input) + if err != nil { - return sdkdiag.AppendErrorf(diags, "creating CodeBuild Report Group: %s", err) + return sdkdiag.AppendErrorf(diags, "creating CodeBuild Report Group (%s): %s", name, err) } - d.SetId(aws.StringValue(resp.ReportGroup.Arn)) + d.SetId(aws.ToString(output.ReportGroup.Arn)) return append(diags, resourceReportGroupRead(ctx, d, meta)...) 
} func resourceReportGroupRead(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { var diags diag.Diagnostics - conn := meta.(*conns.AWSClient).CodeBuildConn(ctx) - - reportGroup, err := FindReportGroupByARN(ctx, conn, d.Id()) - if !d.IsNewResource() && tfawserr.ErrCodeEquals(err, codebuild.ErrCodeResourceNotFoundException) { - create.LogNotFoundRemoveState(names.CodeBuild, create.ErrActionReading, ResNameReportGroup, d.Id()) - d.SetId("") - return diags - } + conn := meta.(*conns.AWSClient).CodeBuildClient(ctx) - if err != nil { - return create.AppendDiagError(diags, names.CodeBuild, create.ErrActionReading, ResNameReportGroup, d.Id(), err) - } + reportGroup, err := findReportGroupByARN(ctx, conn, d.Id()) - if !d.IsNewResource() && reportGroup == nil { - create.LogNotFoundRemoveState(names.CodeBuild, create.ErrActionReading, ResNameReportGroup, d.Id()) + if !d.IsNewResource() && tfresource.NotFound(err) { + log.Printf("[WARN] CodeBuild Report Group (%s) not found, removing from state", d.Id()) d.SetId("") return diags } - if reportGroup == nil { - return create.AppendDiagError(diags, names.CodeBuild, create.ErrActionReading, ResNameReportGroup, d.Id(), errors.New("not found after creation")) + if err != nil { + return sdkdiag.AppendErrorf(diags, "reading CodeBuild Report Group (%s): %s", d.Id(), err) } d.Set("arn", reportGroup.Arn) - d.Set("type", reportGroup.Type) - d.Set("name", reportGroup.Name) - - if err := d.Set("created", reportGroup.Created.Format(time.RFC3339)); err != nil { - return sdkdiag.AppendErrorf(diags, "setting created: %s", err) - } - + d.Set("created", reportGroup.Created.Format(time.RFC3339)) if err := d.Set("export_config", flattenReportGroupExportConfig(reportGroup.ExportConfig)); err != nil { return sdkdiag.AppendErrorf(diags, "setting export config: %s", err) } + d.Set("name", reportGroup.Name) + d.Set("type", reportGroup.Type) setTagsOut(ctx, reportGroup.Tags) @@ -180,7 +172,7 @@ func resourceReportGroupRead(ctx context.Context, d *schema.ResourceData, meta i func resourceReportGroupUpdate(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { var diags diag.Diagnostics - conn := meta.(*conns.AWSClient).CodeBuildConn(ctx) + conn := meta.(*conns.AWSClient).CodeBuildClient(ctx) input := &codebuild.UpdateReportGroupInput{ Arn: aws.String(d.Id()), @@ -194,9 +186,10 @@ func resourceReportGroupUpdate(ctx context.Context, d *schema.ResourceData, meta input.Tags = getTagsIn(ctx) } - _, err := conn.UpdateReportGroupWithContext(ctx, input) + _, err := conn.UpdateReportGroup(ctx, input) + if err != nil { - return sdkdiag.AppendErrorf(diags, "updating CodeBuild Report Group: %s", err) + return sdkdiag.AppendErrorf(diags, "updating CodeBuild Report Group (%s): %s", d.Id(), err) } return append(diags, resourceReportGroupRead(ctx, d, meta)...) 
@@ -204,14 +197,15 @@ func resourceReportGroupUpdate(ctx context.Context, d *schema.ResourceData, meta func resourceReportGroupDelete(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { var diags diag.Diagnostics - conn := meta.(*conns.AWSClient).CodeBuildConn(ctx) + conn := meta.(*conns.AWSClient).CodeBuildClient(ctx) - deleteOpts := &codebuild.DeleteReportGroupInput{ + log.Printf("[INFO] Deleting CodeBuild Report Group: %s", d.Id()) + _, err := conn.DeleteReportGroup(ctx, &codebuild.DeleteReportGroupInput{ Arn: aws.String(d.Id()), - DeleteReports: aws.Bool(d.Get("delete_reports").(bool)), - } + DeleteReports: d.Get("delete_reports").(bool), + }) - if _, err := conn.DeleteReportGroupWithContext(ctx, deleteOpts); err != nil { + if err != nil { return sdkdiag.AppendErrorf(diags, "deleting CodeBuild Report Group (%s): %s", d.Id(), err) } @@ -222,83 +216,161 @@ func resourceReportGroupDelete(ctx context.Context, d *schema.ResourceData, meta return diags } -func expandReportGroupExportConfig(config []interface{}) *codebuild.ReportExportConfig { - if len(config) == 0 { - return nil +func findReportGroupByARN(ctx context.Context, conn *codebuild.Client, arn string) (*types.ReportGroup, error) { + input := &codebuild.BatchGetReportGroupsInput{ + ReportGroupArns: tfslices.Of(arn), + } + + return findReportGroup(ctx, conn, input) +} + +func findReportGroup(ctx context.Context, conn *codebuild.Client, input *codebuild.BatchGetReportGroupsInput) (*types.ReportGroup, error) { + output, err := findReportGroups(ctx, conn, input) + + if err != nil { + return nil, err + } + + return tfresource.AssertSingleValueResult(output) +} + +func findReportGroups(ctx context.Context, conn *codebuild.Client, input *codebuild.BatchGetReportGroupsInput) ([]types.ReportGroup, error) { + output, err := conn.BatchGetReportGroups(ctx, input) + + if err != nil { + return nil, err + } + + if output == nil { + return nil, tfresource.NewEmptyResultError(input) } - s := config[0].(map[string]interface{}) - exportConfig := &codebuild.ReportExportConfig{} + return output.ReportGroups, nil +} + +func statusReportGroup(ctx context.Context, conn *codebuild.Client, arn string) retry.StateRefreshFunc { + return func() (interface{}, string, error) { + output, err := findReportGroupByARN(ctx, conn, arn) + + if tfresource.NotFound(err) { + return nil, "", nil + } - if v, ok := s["type"]; ok { - exportConfig.ExportConfigType = aws.String(v.(string)) + if err != nil { + return nil, "", err + } + + return output, string(output.Status), nil + } +} + +func waitReportGroupDeleted(ctx context.Context, conn *codebuild.Client, arn string) (*types.ReportGroup, error) { + const ( + timeout = 2 * time.Minute + ) + stateConf := &retry.StateChangeConf{ + Pending: enum.Slice(types.ReportGroupStatusTypeDeleting), + Target: []string{}, + Refresh: statusReportGroup(ctx, conn, arn), + Timeout: timeout, } - if v, ok := s["s3_destination"]; ok { - exportConfig.S3Destination = expandReportGroupS3ReportExportConfig(v.([]interface{})) + outputRaw, err := stateConf.WaitForStateContext(ctx) + + if output, ok := outputRaw.(*types.ReportGroup); ok { + return output, err } - return exportConfig + return nil, err } -func flattenReportGroupExportConfig(config *codebuild.ReportExportConfig) []map[string]interface{} { - settings := make(map[string]interface{}) +func expandReportGroupExportConfig(tfList []interface{}) *types.ReportExportConfig { + if len(tfList) == 0 { + return nil + } + + tfMap := tfList[0].(map[string]interface{}) + 
apiObject := &types.ReportExportConfig{} + + if v, ok := tfMap["s3_destination"]; ok { + apiObject.S3Destination = expandReportGroupS3ReportExportConfig(v.([]interface{})) + } + + if v, ok := tfMap["type"]; ok { + apiObject.ExportConfigType = types.ReportExportConfigType(v.(string)) + } + + return apiObject +} - if config == nil { +func flattenReportGroupExportConfig(apiObject *types.ReportExportConfig) []map[string]interface{} { + if apiObject == nil { return nil } - settings["s3_destination"] = flattenReportGroupS3ReportExportConfig(config.S3Destination) - settings["type"] = aws.StringValue(config.ExportConfigType) + tfMap := map[string]interface{}{ + "s3_destination": flattenReportGroupS3ReportExportConfig(apiObject.S3Destination), + "type": apiObject.ExportConfigType, + } - return []map[string]interface{}{settings} + return []map[string]interface{}{tfMap} } -func expandReportGroupS3ReportExportConfig(config []interface{}) *codebuild.S3ReportExportConfig { - if len(config) == 0 { +func expandReportGroupS3ReportExportConfig(tfList []interface{}) *types.S3ReportExportConfig { + if len(tfList) == 0 { return nil } - s := config[0].(map[string]interface{}) - s3ReportExportConfig := &codebuild.S3ReportExportConfig{} + tfMap := tfList[0].(map[string]interface{}) + apiObject := &types.S3ReportExportConfig{} - if v, ok := s["bucket"]; ok { - s3ReportExportConfig.Bucket = aws.String(v.(string)) + if v, ok := tfMap["bucket"]; ok { + apiObject.Bucket = aws.String(v.(string)) } - if v, ok := s["encryption_disabled"]; ok { - s3ReportExportConfig.EncryptionDisabled = aws.Bool(v.(bool)) + + if v, ok := tfMap["encryption_disabled"]; ok { + apiObject.EncryptionDisabled = aws.Bool(v.(bool)) } - if v, ok := s["encryption_key"]; ok { - s3ReportExportConfig.EncryptionKey = aws.String(v.(string)) + if v, ok := tfMap["encryption_key"]; ok { + apiObject.EncryptionKey = aws.String(v.(string)) } - if v, ok := s["packaging"]; ok { - s3ReportExportConfig.Packaging = aws.String(v.(string)) + if v, ok := tfMap["packaging"]; ok { + apiObject.Packaging = types.ReportPackagingType(v.(string)) } - if v, ok := s["path"]; ok { - s3ReportExportConfig.Path = aws.String(v.(string)) + if v, ok := tfMap["path"]; ok { + apiObject.Path = aws.String(v.(string)) } - return s3ReportExportConfig + return apiObject } -func flattenReportGroupS3ReportExportConfig(config *codebuild.S3ReportExportConfig) []map[string]interface{} { - settings := make(map[string]interface{}) - - if config == nil { +func flattenReportGroupS3ReportExportConfig(apiObject *types.S3ReportExportConfig) []map[string]interface{} { + if apiObject == nil { return nil } - settings["path"] = aws.StringValue(config.Path) - settings["bucket"] = aws.StringValue(config.Bucket) - settings["packaging"] = aws.StringValue(config.Packaging) - settings["encryption_disabled"] = aws.BoolValue(config.EncryptionDisabled) + tfMap := map[string]interface{}{ + "packaging": apiObject.Packaging, + } + + if v := apiObject.Bucket; v != nil { + tfMap["bucket"] = aws.ToString(v) + } + + if v := apiObject.EncryptionDisabled; v != nil { + tfMap["encryption_disabled"] = aws.ToBool(v) + } + + if v := apiObject.EncryptionKey; v != nil { + tfMap["encryption_key"] = aws.ToString(v) + } - if config.EncryptionKey != nil { - settings["encryption_key"] = aws.StringValue(config.EncryptionKey) + if v := apiObject.Path; v != nil { + tfMap["path"] = aws.ToString(v) } - return []map[string]interface{}{settings} + return []map[string]interface{}{tfMap} } diff --git 
a/internal/service/codebuild/service_package_gen.go b/internal/service/codebuild/service_package_gen.go index fd807d2f143..683ed3e0043 100644 --- a/internal/service/codebuild/service_package_gen.go +++ b/internal/service/codebuild/service_package_gen.go @@ -35,7 +35,7 @@ func (p *servicePackage) SDKResources(ctx context.Context) []*types.ServicePacka Tags: &types.ServicePackageResourceTags{}, }, { - Factory: ResourceReportGroup, + Factory: resourceReportGroup, TypeName: "aws_codebuild_report_group", Name: "Report Group", Tags: &types.ServicePackageResourceTags{}, diff --git a/internal/service/codebuild/status.go b/internal/service/codebuild/status.go deleted file mode 100644 index 19c237d16f3..00000000000 --- a/internal/service/codebuild/status.go +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - -package codebuild - -import ( - "context" - - "github.com/aws/aws-sdk-go/aws" - "github.com/aws/aws-sdk-go/service/codebuild" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/retry" -) - -const ( - reportGroupStatusUnknown = "Unknown" - reportGroupStatusNotFound = "NotFound" -) - -// statusReportGroup fetches the Report Group and its Status -func statusReportGroup(ctx context.Context, conn *codebuild.CodeBuild, arn string) retry.StateRefreshFunc { - return func() (interface{}, string, error) { - output, err := FindReportGroupByARN(ctx, conn, arn) - if err != nil { - return nil, reportGroupStatusUnknown, err - } - - if output == nil { - return nil, reportGroupStatusNotFound, nil - } - - return output, aws.StringValue(output.Status), nil - } -} diff --git a/internal/service/codebuild/wait.go b/internal/service/codebuild/wait.go deleted file mode 100644 index 207e940faa4..00000000000 --- a/internal/service/codebuild/wait.go +++ /dev/null @@ -1,35 +0,0 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - -package codebuild - -import ( - "context" - "time" - - "github.com/aws/aws-sdk-go/service/codebuild" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/retry" -) - -const ( - // Maximum amount of time to wait for an Operation to return Deleted - reportGroupDeleteTimeout = 2 * time.Minute -) - -// waitReportGroupDeleted waits for an ReportGroup to return Deleted -func waitReportGroupDeleted(ctx context.Context, conn *codebuild.CodeBuild, arn string) (*codebuild.ReportGroup, error) { - stateConf := &retry.StateChangeConf{ - Pending: []string{codebuild.ReportGroupStatusTypeDeleting}, - Target: []string{}, - Refresh: statusReportGroup(ctx, conn, arn), - Timeout: reportGroupDeleteTimeout, - } - - outputRaw, err := stateConf.WaitForStateContext(ctx) - - if output, ok := outputRaw.(*codebuild.ReportGroup); ok { - return output, err - } - - return nil, err -} From 9f5d5fe826f3459752b1705b6a744260d1f63781 Mon Sep 17 00:00:00 2001 From: Kit Ewbank Date: Mon, 22 Jan 2024 13:58:47 -0500 Subject: [PATCH 09/30] r/aws_codebuild_webhook: Migrate to AWS SDK for Go v2. 
--- internal/service/codebuild/consts.go | 4 - internal/service/codebuild/exports_test.go | 2 + .../service/codebuild/service_package_gen.go | 3 +- internal/service/codebuild/webhook.go | 366 ++++++++++-------- 4 files changed, 213 insertions(+), 162 deletions(-) diff --git a/internal/service/codebuild/consts.go b/internal/service/codebuild/consts.go index f5fafb3d3ee..2e5a776b4d2 100644 --- a/internal/service/codebuild/consts.go +++ b/internal/service/codebuild/consts.go @@ -7,10 +7,6 @@ import ( "time" ) -const ( - ResNameWebhook = "Webhook" -) - const ( propagationTimeout = 2 * time.Minute ) diff --git a/internal/service/codebuild/exports_test.go b/internal/service/codebuild/exports_test.go index f0c206ecdb4..07933981f2c 100644 --- a/internal/service/codebuild/exports_test.go +++ b/internal/service/codebuild/exports_test.go @@ -8,8 +8,10 @@ var ( ResourceReportGroup = resourceReportGroup ResourceResourcePolicy = resourceResourcePolicy ResourceSourceCredential = resourceSourceCredential + ResourceWebhook = resourceWebhook FindReportGroupByARN = findReportGroupByARN FindResourcePolicyByARN = findResourcePolicyByARN FindSourceCredentialsByARN = findSourceCredentialsByARN + FindWebhookByProjectName = findWebhookByProjectName ) diff --git a/internal/service/codebuild/service_package_gen.go b/internal/service/codebuild/service_package_gen.go index 683ed3e0043..d7a5c5bddc7 100644 --- a/internal/service/codebuild/service_package_gen.go +++ b/internal/service/codebuild/service_package_gen.go @@ -51,8 +51,9 @@ func (p *servicePackage) SDKResources(ctx context.Context) []*types.ServicePacka Name: "Source Credential", }, { - Factory: ResourceWebhook, + Factory: resourceWebhook, TypeName: "aws_codebuild_webhook", + Name: "Webhook", }, } } diff --git a/internal/service/codebuild/webhook.go b/internal/service/codebuild/webhook.go index bd1f38019ba..8d7b96ddcdc 100644 --- a/internal/service/codebuild/webhook.go +++ b/internal/service/codebuild/webhook.go @@ -4,25 +4,24 @@ package codebuild import ( - "bytes" "context" - "errors" - "fmt" + "log" - "github.com/aws/aws-sdk-go/aws" - "github.com/aws/aws-sdk-go/service/codebuild" - "github.com/hashicorp/aws-sdk-go-base/v2/awsv1shim/v2/tfawserr" + "github.com/aws/aws-sdk-go-v2/aws" + "github.com/aws/aws-sdk-go-v2/service/codebuild" + "github.com/aws/aws-sdk-go-v2/service/codebuild/types" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation" "github.com/hashicorp/terraform-provider-aws/internal/conns" - "github.com/hashicorp/terraform-provider-aws/internal/create" + "github.com/hashicorp/terraform-provider-aws/internal/enum" + "github.com/hashicorp/terraform-provider-aws/internal/errs" "github.com/hashicorp/terraform-provider-aws/internal/errs/sdkdiag" - "github.com/hashicorp/terraform-provider-aws/names" + tfslices "github.com/hashicorp/terraform-provider-aws/internal/slices" + "github.com/hashicorp/terraform-provider-aws/internal/tfresource" ) -// @SDKResource("aws_codebuild_webhook") -func ResourceWebhook() *schema.Resource { +// @SDKResource("aws_codebuild_webhook", name="Webhook") +func resourceWebhook() *schema.Resource { return &schema.Resource{ CreateWithoutTimeout: resourceWebhookCreate, ReadWithoutTimeout: resourceWebhookRead, @@ -34,21 +33,16 @@ func ResourceWebhook() *schema.Resource { }, Schema: map[string]*schema.Schema{ - "project_name": { - Type: schema.TypeString, - 
Required: true, - ForceNew: true, - }, - "build_type": { - Type: schema.TypeString, - Optional: true, - ValidateFunc: validation.StringInSlice(codebuild.WebhookBuildType_Values(), false), - }, "branch_filter": { Type: schema.TypeString, Optional: true, ConflictsWith: []string{"filter_group"}, }, + "build_type": { + Type: schema.TypeString, + Optional: true, + ValidateDiagFunc: enum.Validate[types.WebhookBuildType](), + }, "filter_group": { Type: schema.TypeSet, Optional: true, @@ -59,11 +53,6 @@ func ResourceWebhook() *schema.Resource { Optional: true, Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ - "type": { - Type: schema.TypeString, - Required: true, - ValidateFunc: validation.StringInSlice(codebuild.WebhookFilterType_Values(), false), - }, "exclude_matched_pattern": { Type: schema.TypeBool, Optional: true, @@ -73,18 +62,27 @@ func ResourceWebhook() *schema.Resource { Type: schema.TypeString, Required: true, }, + "type": { + Type: schema.TypeString, + Required: true, + ValidateDiagFunc: enum.Validate[types.WebhookFilterType](), + }, }, }, }, }, }, - Set: resourceWebhookFilterHash, ConflictsWith: []string{"branch_filter"}, }, "payload_url": { Type: schema.TypeString, Computed: true, }, + "project_name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + }, "secret": { Type: schema.TypeString, Computed: true, @@ -100,216 +98,270 @@ func ResourceWebhook() *schema.Resource { func resourceWebhookCreate(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { var diags diag.Diagnostics - conn := meta.(*conns.AWSClient).CodeBuildConn(ctx) + conn := meta.(*conns.AWSClient).CodeBuildClient(ctx) + projectName := d.Get("project_name").(string) input := &codebuild.CreateWebhookInput{ - ProjectName: aws.String(d.Get("project_name").(string)), - FilterGroups: expandWebhookFilterGroups(d), + ProjectName: aws.String(projectName), + } + + if v, ok := d.GetOk("branch_filter"); ok { + input.BranchFilter = aws.String(v.(string)) } if v, ok := d.GetOk("build_type"); ok { - input.BuildType = aws.String(v.(string)) + input.BuildType = types.WebhookBuildType(v.(string)) } - // The CodeBuild API requires this to be non-empty if defined - if v, ok := d.GetOk("branch_filter"); ok { - input.BranchFilter = aws.String(v.(string)) + if v, ok := d.GetOk("filter_group"); ok && v.(*schema.Set).Len() > 0 { + input.FilterGroups = expandWebhookFilterGroups(v.(*schema.Set).List()) } - resp, err := conn.CreateWebhookWithContext(ctx, input) + output, err := conn.CreateWebhook(ctx, input) + if err != nil { - return sdkdiag.AppendErrorf(diags, "creating CodeBuild Webhook: %s", err) + return sdkdiag.AppendErrorf(diags, "creating CodeBuild Webhook (%s): %s", projectName, err) } - // Secret is only returned on create, so capture it at the start - d.Set("secret", resp.Webhook.Secret) - d.SetId(d.Get("project_name").(string)) + d.SetId(projectName) + // Secret is only returned on create. + d.Set("secret", output.Webhook.Secret) return append(diags, resourceWebhookRead(ctx, d, meta)...) 
} -func expandWebhookFilterGroups(d *schema.ResourceData) [][]*codebuild.WebhookFilter { - configs := d.Get("filter_group").(*schema.Set).List() +func resourceWebhookRead(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { + var diags diag.Diagnostics + conn := meta.(*conns.AWSClient).CodeBuildClient(ctx) - webhookFilters := make([][]*codebuild.WebhookFilter, 0) + webhook, err := findWebhookByProjectName(ctx, conn, d.Id()) - if len(configs) == 0 { - return nil + if !d.IsNewResource() && tfresource.NotFound(err) { + log.Printf("[WARN] CodeBuild Webhook (%s) not found, removing from state", d.Id()) + d.SetId("") + return diags } - for _, config := range configs { - filters := expandWebhookFilterData(config.(map[string]interface{})) - webhookFilters = append(webhookFilters, filters) + if err != nil { + return sdkdiag.AppendErrorf(diags, "reading CodeBuild Webhook (%s): %s", d.Id(), err) } - return webhookFilters + d.Set("build_type", webhook.BuildType) + d.Set("branch_filter", webhook.BranchFilter) + d.Set("filter_group", flattenWebhookFilterGroups(webhook.FilterGroups)) + d.Set("payload_url", webhook.PayloadUrl) + d.Set("project_name", d.Id()) + d.Set("secret", d.Get("secret").(string)) + d.Set("url", webhook.Url) + + return diags } -func expandWebhookFilterData(data map[string]interface{}) []*codebuild.WebhookFilter { - filters := make([]*codebuild.WebhookFilter, 0) +func resourceWebhookUpdate(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { + var diags diag.Diagnostics + conn := meta.(*conns.AWSClient).CodeBuildClient(ctx) - filterConfigs := data["filter"].([]interface{}) + input := &codebuild.UpdateWebhookInput{ + ProjectName: aws.String(d.Id()), + } - for i, filterConfig := range filterConfigs { - filter := filterConfig.(map[string]interface{}) - filters = append(filters, &codebuild.WebhookFilter{ - Type: aws.String(filter["type"].(string)), - ExcludeMatchedPattern: aws.Bool(filter["exclude_matched_pattern"].(bool)), - }) - if v := filter["pattern"]; v != nil { - filters[i].Pattern = aws.String(v.(string)) - } + if v, ok := d.GetOk("build_type"); ok { + input.BuildType = types.WebhookBuildType(v.(string)) + } + + var filterGroups [][]types.WebhookFilter + if v, ok := d.GetOk("filter_group"); ok && v.(*schema.Set).Len() > 0 { + filterGroups = expandWebhookFilterGroups(v.(*schema.Set).List()) + } + if len(filterGroups) > 0 { + input.FilterGroups = filterGroups + } else { + input.BranchFilter = aws.String(d.Get("branch_filter").(string)) + } + + _, err := conn.UpdateWebhook(ctx, input) + + if err != nil { + return sdkdiag.AppendErrorf(diags, "updating CodeBuild Webhook (%s): %s", d.Id(), err) } - return filters + return append(diags, resourceWebhookRead(ctx, d, meta)...) 
} -func resourceWebhookRead(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { +func resourceWebhookDelete(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { var diags diag.Diagnostics - conn := meta.(*conns.AWSClient).CodeBuildConn(ctx) + conn := meta.(*conns.AWSClient).CodeBuildClient(ctx) - resp, err := conn.BatchGetProjectsWithContext(ctx, &codebuild.BatchGetProjectsInput{ - Names: []*string{ - aws.String(d.Id()), - }, + log.Printf("[INFO] Deleting CodeBuild Webhook: %s", d.Id()) + _, err := conn.DeleteWebhook(ctx, &codebuild.DeleteWebhookInput{ + ProjectName: aws.String(d.Id()), }) - if !d.IsNewResource() && tfawserr.ErrCodeEquals(err, codebuild.ErrCodeResourceNotFoundException) { - create.LogNotFoundRemoveState(names.CodeBuild, create.ErrActionReading, ResNameWebhook, d.Id()) - d.SetId("") + if errs.IsA[*types.ResourceNotFoundException](err) { return diags } if err != nil { - return create.AppendDiagError(diags, names.CodeBuild, create.ErrActionReading, ResNameWebhook, d.Id(), err) + return sdkdiag.AppendErrorf(diags, "deleting CodeBuild Webhook (%s): %s", d.Id(), err) } - if d.IsNewResource() && len(resp.Projects) == 0 { - return create.AppendDiagError(diags, names.CodeBuild, create.ErrActionReading, ResNameWebhook, d.Id(), errors.New("no project found after create")) + return diags +} + +func findWebhookByProjectName(ctx context.Context, conn *codebuild.Client, name string) (*types.Webhook, error) { + output, err := findProjectByName(ctx, conn, name) + + if err != nil { + return nil, err } - if !d.IsNewResource() && len(resp.Projects) == 0 { - create.LogNotFoundRemoveState(names.CodeBuild, create.ErrActionReading, ResNameWebhook, d.Id()) - d.SetId("") - return diags + if output.Webhook == nil { + return nil, tfresource.NewEmptyResultError(name) } - project := resp.Projects[0] + return output.Webhook, nil +} - if d.IsNewResource() && project.Webhook == nil { - return create.AppendDiagError(diags, names.CodeBuild, create.ErrActionReading, ResNameWebhook, d.Id(), errors.New("no webhook after creation")) +func findProjectByName(ctx context.Context, conn *codebuild.Client, name string) (*types.Project, error) { + input := &codebuild.BatchGetProjectsInput{ + Names: tfslices.Of(name), } - if !d.IsNewResource() && project.Webhook == nil { - create.LogNotFoundRemoveState(names.CodeBuild, create.ErrActionReading, ResNameWebhook, d.Id()) - d.SetId("") - return diags - } + return findProject(ctx, conn, input) +} - d.Set("build_type", project.Webhook.BuildType) - d.Set("branch_filter", project.Webhook.BranchFilter) - d.Set("filter_group", flattenWebhookFilterGroups(project.Webhook.FilterGroups)) - d.Set("payload_url", project.Webhook.PayloadUrl) - d.Set("project_name", project.Name) - d.Set("url", project.Webhook.Url) - // The secret is never returned after creation, so don't set it here +func findProject(ctx context.Context, conn *codebuild.Client, input *codebuild.BatchGetProjectsInput) (*types.Project, error) { + output, err := findProjects(ctx, conn, input) - return diags + if err != nil { + return nil, err + } + + return tfresource.AssertSingleValueResult(output) } -func resourceWebhookUpdate(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - var diags diag.Diagnostics - conn := meta.(*conns.AWSClient).CodeBuildConn(ctx) +func findProjects(ctx context.Context, conn *codebuild.Client, input *codebuild.BatchGetProjectsInput) ([]types.Project, error) { + output, err := conn.BatchGetProjects(ctx, 
input) - var err error - filterGroups := expandWebhookFilterGroups(d) + if err != nil { + return nil, err + } - var buildType *string - if v, ok := d.GetOk("build_type"); ok { - buildType = aws.String(v.(string)) + if output == nil { + return nil, tfresource.NewEmptyResultError(input) } - if len(filterGroups) >= 1 { - _, err = conn.UpdateWebhookWithContext(ctx, &codebuild.UpdateWebhookInput{ - ProjectName: aws.String(d.Id()), - BuildType: buildType, - FilterGroups: filterGroups, - RotateSecret: aws.Bool(false), - }) - } else { - _, err = conn.UpdateWebhookWithContext(ctx, &codebuild.UpdateWebhookInput{ - ProjectName: aws.String(d.Id()), - BuildType: buildType, - BranchFilter: aws.String(d.Get("branch_filter").(string)), - RotateSecret: aws.Bool(false), - }) + return output.Projects, nil +} + +func expandWebhookFilterGroups(tfList []interface{}) [][]types.WebhookFilter { + if len(tfList) == 0 { + return nil } - if err != nil { - return sdkdiag.AppendErrorf(diags, "updating CodeBuild Webhook (%s): %s", d.Id(), err) + var apiObjects [][]types.WebhookFilter + + for _, tfMapRaw := range tfList { + tfMap, ok := tfMapRaw.(map[string]interface{}) + if !ok { + continue + } + + if v, ok := tfMap["filter"].([]interface{}); ok && len(v) > 0 { + apiObjects = append(apiObjects, expandWebhookFilters(v)) + } } - return append(diags, resourceWebhookRead(ctx, d, meta)...) + return apiObjects } -func resourceWebhookDelete(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - var diags diag.Diagnostics - conn := meta.(*conns.AWSClient).CodeBuildConn(ctx) +func expandWebhookFilters(tfList []interface{}) []types.WebhookFilter { + if len(tfList) == 0 { + return nil + } - _, err := conn.DeleteWebhookWithContext(ctx, &codebuild.DeleteWebhookInput{ - ProjectName: aws.String(d.Id()), - }) + var apiObjects []types.WebhookFilter - if err != nil { - if tfawserr.ErrCodeEquals(err, codebuild.ErrCodeResourceNotFoundException) { - return diags + for _, tfMapRaw := range tfList { + tfMap, ok := tfMapRaw.(map[string]interface{}) + if !ok { + continue } - return sdkdiag.AppendErrorf(diags, "deleting CodeBuild Webhook (%s): %s", d.Id(), err) + + apiObject := expandWebhookFilter(tfMap) + + if apiObject == nil { + continue + } + + apiObjects = append(apiObjects, *apiObject) } - return diags + return apiObjects +} + +func expandWebhookFilter(tfMap map[string]interface{}) *types.WebhookFilter { + if tfMap == nil { + return nil + } + + apiObject := &types.WebhookFilter{} + + if v, ok := tfMap["exclude_matched_pattern"].(bool); ok { + apiObject.ExcludeMatchedPattern = aws.Bool(v) + } + + if v, ok := tfMap["pattern"].(string); ok && v != "" { + apiObject.Pattern = aws.String(v) + } + + if v, ok := tfMap["types"].(string); ok && v != "" { + apiObject.Type = types.WebhookFilterType(v) + } + + return apiObject } -func flattenWebhookFilterGroups(filterList [][]*codebuild.WebhookFilter) *schema.Set { - filterSet := schema.Set{ - F: resourceWebhookFilterHash, +func flattenWebhookFilterGroups(apiObjects [][]types.WebhookFilter) []interface{} { + if len(apiObjects) == 0 { + return nil } - for _, filters := range filterList { - filterSet.Add(flattenWebhookFilterData(filters)) + var tfList []interface{} + + for _, apiObject := range apiObjects { + tfList = append(tfList, flattenWebhookFilters(apiObject)) } - return &filterSet + + return tfList } -func resourceWebhookFilterHash(v interface{}) int { - var buf bytes.Buffer - m := v.(map[string]interface{}) +func flattenWebhookFilters(apiObjects []types.WebhookFilter) 
[]interface{} { + if len(apiObjects) == 0 { + return nil + } - for _, g := range m { - for _, f := range g.([]interface{}) { - r := f.(map[string]interface{}) - buf.WriteString(fmt.Sprintf("%s-", r["type"].(string))) - buf.WriteString(fmt.Sprintf("%s-", r["pattern"].(string))) - buf.WriteString(fmt.Sprintf("%q", r["exclude_matched_pattern"])) - } + var tfList []interface{} + + for _, apiObject := range apiObjects { + tfList = append(tfList, flattenWebhookFilter(apiObject)) } - return create.StringHashcode(buf.String()) + return tfList } -func flattenWebhookFilterData(filters []*codebuild.WebhookFilter) map[string]interface{} { - values := map[string]interface{}{} - ff := make([]interface{}, 0) +func flattenWebhookFilter(apiObject types.WebhookFilter) map[string]interface{} { + tfMap := map[string]interface{}{ + "type": apiObject.Type, + } - for _, f := range filters { - ff = append(ff, map[string]interface{}{ - "type": *f.Type, - "pattern": *f.Pattern, - "exclude_matched_pattern": *f.ExcludeMatchedPattern, - }) + if v := apiObject.ExcludeMatchedPattern; v != nil { + tfMap["exclude_matched_pattern"] = aws.ToBool(v) } - values["filter"] = ff + if v := apiObject.Pattern; v != nil { + tfMap["pattern"] = aws.ToString(v) + } - return values + return tfMap } From 104ee1f4ba9e9518aca73be06d46f032f7c9cf59 Mon Sep 17 00:00:00 2001 From: Kit Ewbank Date: Mon, 22 Jan 2024 16:55:43 -0500 Subject: [PATCH 10/30] r/aws_codebuild_project: Migrate to AWS SDK for Go v2. --- internal/service/codebuild/exports_test.go | 2 + internal/service/codebuild/find.go | 33 - internal/service/codebuild/project.go | 2010 ++++++++--------- .../service/codebuild/service_package_gen.go | 2 +- internal/service/codebuild/webhook.go | 35 +- 5 files changed, 989 insertions(+), 1093 deletions(-) delete mode 100644 internal/service/codebuild/find.go diff --git a/internal/service/codebuild/exports_test.go b/internal/service/codebuild/exports_test.go index 07933981f2c..64454498530 100644 --- a/internal/service/codebuild/exports_test.go +++ b/internal/service/codebuild/exports_test.go @@ -5,11 +5,13 @@ package codebuild // Exports for use in tests only. var ( + ResourceProject = resourceProject ResourceReportGroup = resourceReportGroup ResourceResourcePolicy = resourceResourcePolicy ResourceSourceCredential = resourceSourceCredential ResourceWebhook = resourceWebhook + FindProjectByNameOrARN = findProjectByNameOrARN FindReportGroupByARN = findReportGroupByARN FindResourcePolicyByARN = findResourcePolicyByARN FindSourceCredentialsByARN = findSourceCredentialsByARN diff --git a/internal/service/codebuild/find.go b/internal/service/codebuild/find.go deleted file mode 100644 index 8a11d2a1e14..00000000000 --- a/internal/service/codebuild/find.go +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - -package codebuild - -import ( - "context" - - "github.com/aws/aws-sdk-go/aws" - "github.com/aws/aws-sdk-go/service/codebuild" - "github.com/hashicorp/terraform-provider-aws/internal/tfresource" -) - -func FindProjectByARN(ctx context.Context, conn *codebuild.CodeBuild, arn string) (*codebuild.Project, error) { - input := &codebuild.BatchGetProjectsInput{ - Names: []*string{aws.String(arn)}, - } - - output, err := conn.BatchGetProjectsWithContext(ctx, input) - if err != nil { - return nil, err - } - - if output == nil || len(output.Projects) == 0 || output.Projects[0] == nil { - return nil, tfresource.NewEmptyResultError(input) - } - - if count := len(output.Projects); count > 1 { - return nil, tfresource.NewTooManyResultsError(count, input) - } - - return output.Projects[0], nil -} diff --git a/internal/service/codebuild/project.go b/internal/service/codebuild/project.go index 410272774d3..577e186a021 100644 --- a/internal/service/codebuild/project.go +++ b/internal/service/codebuild/project.go @@ -4,25 +4,23 @@ package codebuild import ( - "bytes" "context" "fmt" "log" - "time" "github.com/YakDriver/regexache" - "github.com/aws/aws-sdk-go/aws" - "github.com/aws/aws-sdk-go/service/codebuild" - "github.com/hashicorp/aws-sdk-go-base/v2/awsv1shim/v2/tfawserr" + "github.com/aws/aws-sdk-go-v2/aws" + "github.com/aws/aws-sdk-go-v2/service/codebuild" + "github.com/aws/aws-sdk-go-v2/service/codebuild/types" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/customdiff" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/retry" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation" "github.com/hashicorp/terraform-provider-aws/internal/conns" - "github.com/hashicorp/terraform-provider-aws/internal/create" + "github.com/hashicorp/terraform-provider-aws/internal/enum" "github.com/hashicorp/terraform-provider-aws/internal/errs/sdkdiag" "github.com/hashicorp/terraform-provider-aws/internal/flex" + tfslices "github.com/hashicorp/terraform-provider-aws/internal/slices" tftags "github.com/hashicorp/terraform-provider-aws/internal/tags" "github.com/hashicorp/terraform-provider-aws/internal/tfresource" "github.com/hashicorp/terraform-provider-aws/internal/verify" @@ -31,12 +29,13 @@ import ( // @SDKResource("aws_codebuild_project", name="Project") // @Tags -func ResourceProject() *schema.Resource { +func resourceProject() *schema.Resource { return &schema.Resource{ CreateWithoutTimeout: resourceProjectCreate, ReadWithoutTimeout: resourceProjectRead, UpdateWithoutTimeout: resourceProjectUpdate, DeleteWithoutTimeout: resourceProjectDelete, + Importer: &schema.ResourceImporter{ StateContext: schema.ImportStatePassthroughContext, }, @@ -57,9 +56,18 @@ func ResourceProject() *schema.Resource { Optional: true, }, "bucket_owner_access": { - Type: schema.TypeString, - Optional: true, - ValidateFunc: validation.StringInSlice(codebuild.BucketOwnerAccess_Values(), false), + Type: schema.TypeString, + Optional: true, + ValidateDiagFunc: enum.Validate[types.BucketOwnerAccess](), + }, + "encryption_disabled": { + Type: schema.TypeBool, + Optional: true, + Default: false, + }, + "location": { + Type: schema.TypeString, + Optional: true, }, "name": { Type: schema.TypeString, @@ -71,57 +79,57 @@ func ResourceProject() *schema.Resource { return false }, }, - "encryption_disabled": { - Type: 
schema.TypeBool, - Optional: true, - Default: false, - }, - "location": { - Type: schema.TypeString, - Optional: true, - }, "namespace_type": { Type: schema.TypeString, Optional: true, DiffSuppressFunc: func(k, old, new string, d *schema.ResourceData) bool { - if d.Get("artifacts.0.type") == codebuild.ArtifactsTypeS3 { - return old == codebuild.ArtifactNamespaceNone && new == "" + if d.Get("artifacts.0.type") == types.ArtifactsTypeS3 { + return types.ArtifactNamespace(old) == types.ArtifactNamespaceNone && new == "" } return false }, - ValidateFunc: validation.StringInSlice(codebuild.ArtifactNamespace_Values(), false), + ValidateDiagFunc: enum.Validate[types.ArtifactNamespace](), + }, + "override_artifact_name": { + Type: schema.TypeBool, + Optional: true, + Default: false, }, "packaging": { Type: schema.TypeString, Optional: true, DiffSuppressFunc: func(k, old, new string, d *schema.ResourceData) bool { switch d.Get("artifacts.0.type") { - case codebuild.ArtifactsTypeCodepipeline: + case types.ArtifactsTypeCodepipeline: return new == "" - case codebuild.ArtifactsTypeS3: - return old == codebuild.ArtifactPackagingNone && new == "" + case types.ArtifactsTypeS3: + return types.ArtifactPackaging(old) == types.ArtifactPackagingNone && new == "" } return false }, - ValidateFunc: validation.StringInSlice(codebuild.ArtifactPackaging_Values(), false), + ValidateDiagFunc: enum.Validate[types.ArtifactPackaging](), }, "path": { Type: schema.TypeString, Optional: true, }, "type": { - Type: schema.TypeString, - Required: true, - ValidateFunc: validation.StringInSlice(codebuild.ArtifactsType_Values(), false), - }, - "override_artifact_name": { - Type: schema.TypeBool, - Optional: true, - Default: false, + Type: schema.TypeString, + Required: true, + ValidateDiagFunc: enum.Validate[types.ArtifactsType](), }, }, }, }, + "badge_enabled": { + Type: schema.TypeBool, + Optional: true, + Default: false, + }, + "badge_url": { + Type: schema.TypeString, + Computed: true, + }, "build_batch_config": { Type: schema.TypeList, MaxItems: 1, @@ -142,8 +150,8 @@ func ResourceProject() *schema.Resource { Type: schema.TypeList, Optional: true, Elem: &schema.Schema{ - Type: schema.TypeString, - ValidateFunc: validation.StringInSlice(codebuild.ComputeType_Values(), false), + Type: schema.TypeString, + ValidateDiagFunc: enum.Validate[types.ComputeType](), }, }, "maximum_builds_allowed": { @@ -167,6 +175,21 @@ func ResourceProject() *schema.Resource { }, }, }, + "build_timeout": { + Type: schema.TypeInt, + Optional: true, + Default: 60, + ValidateFunc: validation.IntBetween(5, 480), + DiffSuppressFunc: func(k, old, new string, d *schema.ResourceData) bool { + if d.Get("environment.0.type") == types.EnvironmentTypeArmLambdaContainer { + return true + } + if d.Get("environment.0.type") == types.EnvironmentTypeLinuxLambdaContainer { + return true + } + return false + }, + }, "cache": { Type: schema.TypeList, Optional: true, @@ -174,12 +197,6 @@ func ResourceProject() *schema.Resource { DiffSuppressFunc: verify.SuppressMissingOptionalConfigurationBlock, Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ - "type": { - Type: schema.TypeString, - Optional: true, - Default: codebuild.CacheTypeNoCache, - ValidateFunc: validation.StringInSlice(codebuild.CacheType_Values(), false), - }, "location": { Type: schema.TypeString, Optional: true, @@ -188,10 +205,16 @@ func ResourceProject() *schema.Resource { Type: schema.TypeList, Optional: true, Elem: &schema.Schema{ - Type: schema.TypeString, - ValidateFunc: 
validation.StringInSlice(codebuild.CacheMode_Values(), false), + Type: schema.TypeString, + ValidateDiagFunc: enum.Validate[types.CacheMode](), }, }, + "type": { + Type: schema.TypeString, + Optional: true, + Default: types.CacheTypeNoCache, + ValidateDiagFunc: enum.Validate[types.CacheType](), + }, }, }, }, @@ -217,10 +240,15 @@ func ResourceProject() *schema.Resource { MaxItems: 1, Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ - "compute_type": { + "certificate": { Type: schema.TypeString, - Required: true, - ValidateFunc: validation.StringInSlice(codebuild.ComputeType_Values(), false), + Optional: true, + ValidateFunc: validation.StringMatch(regexache.MustCompile(`\.(pem|zip)$`), "must end in .pem or .zip"), + }, + "compute_type": { + Type: schema.TypeString, + Required: true, + ValidateDiagFunc: enum.Validate[types.ComputeType](), }, "environment_variable": { Type: schema.TypeList, @@ -231,16 +259,16 @@ func ResourceProject() *schema.Resource { Type: schema.TypeString, Required: true, }, + "type": { + Type: schema.TypeString, + Optional: true, + Default: types.EnvironmentVariableTypePlaintext, + ValidateDiagFunc: enum.Validate[types.EnvironmentVariableType](), + }, "value": { Type: schema.TypeString, Required: true, }, - "type": { - Type: schema.TypeString, - Optional: true, - ValidateFunc: validation.StringInSlice(codebuild.EnvironmentVariableType_Values(), false), - Default: codebuild.EnvironmentVariableTypePlaintext, - }, }, }, }, @@ -248,27 +276,17 @@ func ResourceProject() *schema.Resource { Type: schema.TypeString, Required: true, }, - "type": { - Type: schema.TypeString, - Required: true, - ValidateFunc: validation.StringInSlice(codebuild.EnvironmentType_Values(), false), - }, "image_pull_credentials_type": { - Type: schema.TypeString, - Optional: true, - Default: codebuild.ImagePullCredentialsTypeCodebuild, - ValidateFunc: validation.StringInSlice(codebuild.ImagePullCredentialsType_Values(), false), + Type: schema.TypeString, + Optional: true, + Default: types.ImagePullCredentialsTypeCodebuild, + ValidateDiagFunc: enum.Validate[types.ImagePullCredentialsType](), }, "privileged_mode": { Type: schema.TypeBool, Optional: true, Default: false, }, - "certificate": { - Type: schema.TypeString, - Optional: true, - ValidateFunc: validation.StringMatch(regexache.MustCompile(`\.(pem|zip)$`), "must end in .pem or .zip"), - }, "registry_credential": { Type: schema.TypeList, Optional: true, @@ -280,13 +298,18 @@ func ResourceProject() *schema.Resource { Required: true, }, "credential_provider": { - Type: schema.TypeString, - Required: true, - ValidateFunc: validation.StringInSlice(codebuild.CredentialProviderType_Values(), false), + Type: schema.TypeString, + Required: true, + ValidateDiagFunc: enum.Validate[types.CredentialProviderType](), }, }, }, }, + "type": { + Type: schema.TypeString, + Required: true, + ValidateDiagFunc: enum.Validate[types.EnvironmentType](), + }, }, }, }, @@ -312,10 +335,10 @@ func ResourceProject() *schema.Resource { Optional: true, }, "type": { - Type: schema.TypeString, - Optional: true, - Default: codebuild.FileSystemTypeEfs, - ValidateFunc: validation.StringInSlice(codebuild.FileSystemType_Values(), false), + Type: schema.TypeString, + Optional: true, + Default: types.FileSystemTypeEfs, + ValidateDiagFunc: enum.Validate[types.FileSystemType](), }, }, }, @@ -332,16 +355,16 @@ func ResourceProject() *schema.Resource { MaxItems: 1, Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ - "status": { - Type: schema.TypeString, - Optional: true, - 
Default: codebuild.LogsConfigStatusTypeEnabled, - ValidateFunc: validation.StringInSlice(codebuild.LogsConfigStatusType_Values(), false), - }, "group_name": { Type: schema.TypeString, Optional: true, }, + "status": { + Type: schema.TypeString, + Optional: true, + Default: types.LogsConfigStatusTypeEnabled, + ValidateDiagFunc: enum.Validate[types.LogsConfigStatusType](), + }, "stream_name": { Type: schema.TypeString, Optional: true, @@ -357,25 +380,25 @@ func ResourceProject() *schema.Resource { Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ "bucket_owner_access": { - Type: schema.TypeString, - Optional: true, - ValidateFunc: validation.StringInSlice(codebuild.BucketOwnerAccess_Values(), false), + Type: schema.TypeString, + Optional: true, + ValidateDiagFunc: enum.Validate[types.BucketOwnerAccess](), }, - "status": { - Type: schema.TypeString, - Optional: true, - Default: codebuild.LogsConfigStatusTypeDisabled, - ValidateFunc: validation.StringInSlice(codebuild.LogsConfigStatusType_Values(), false), + "encryption_disabled": { + Type: schema.TypeBool, + Optional: true, + Default: false, }, "location": { Type: schema.TypeString, Optional: true, ValidateFunc: validProjectS3LogsLocation, }, - "encryption_disabled": { - Type: schema.TypeBool, - Optional: true, - Default: false, + "status": { + Type: schema.TypeString, + Optional: true, + Default: types.LogsConfigStatusTypeDisabled, + ValidateDiagFunc: enum.Validate[types.LogsConfigStatusType](), }, }, }, @@ -391,21 +414,50 @@ func ResourceProject() *schema.Resource { ForceNew: true, ValidateFunc: ValidProjectName, }, + "project_visibility": { + Type: schema.TypeString, + Optional: true, + Default: types.ProjectVisibilityTypePrivate, + ValidateDiagFunc: enum.Validate[types.ProjectVisibilityType](), + }, + "public_project_alias": { + Type: schema.TypeString, + Computed: true, + }, + "queued_timeout": { + Type: schema.TypeInt, + Optional: true, + Default: 480, + ValidateFunc: validation.IntBetween(5, 480), + DiffSuppressFunc: func(k, old, new string, d *schema.ResourceData) bool { + if d.Get("environment.0.type") == types.EnvironmentTypeArmLambdaContainer { + return true + } + if d.Get("environment.0.type") == types.EnvironmentTypeLinuxLambdaContainer { + return true + } + return false + }, + }, + "resource_access_role": { + Type: schema.TypeString, + Optional: true, + ValidateFunc: verify.ValidARN, + }, "secondary_artifacts": { Type: schema.TypeSet, Optional: true, MaxItems: 12, - Set: resourceProjectArtifactsHash, Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ - "name": { + "artifact_identifier": { Type: schema.TypeString, - Optional: true, + Required: true, }, "bucket_owner_access": { - Type: schema.TypeString, - Optional: true, - ValidateFunc: validation.StringInSlice(codebuild.BucketOwnerAccess_Values(), false), + Type: schema.TypeString, + Optional: true, + ValidateDiagFunc: enum.Validate[types.BucketOwnerAccess](), }, "encryption_disabled": { Type: schema.TypeBool, @@ -416,11 +468,15 @@ func ResourceProject() *schema.Resource { Type: schema.TypeString, Optional: true, }, + "name": { + Type: schema.TypeString, + Optional: true, + }, "namespace_type": { - Type: schema.TypeString, - Optional: true, - ValidateFunc: validation.StringInSlice(codebuild.ArtifactNamespace_Values(), false), - Default: codebuild.ArtifactNamespaceNone, + Type: schema.TypeString, + Optional: true, + Default: types.ArtifactNamespaceNone, + ValidateDiagFunc: enum.Validate[types.ArtifactNamespace](), }, "override_artifact_name": { Type: 
schema.TypeBool, @@ -428,23 +484,19 @@ func ResourceProject() *schema.Resource { Default: false, }, "packaging": { - Type: schema.TypeString, - Optional: true, - ValidateFunc: validation.StringInSlice(codebuild.ArtifactPackaging_Values(), false), - Default: codebuild.ArtifactPackagingNone, + Type: schema.TypeString, + Optional: true, + Default: types.ArtifactPackagingNone, + ValidateDiagFunc: enum.Validate[types.ArtifactPackaging](), }, "path": { Type: schema.TypeString, Optional: true, }, - "artifact_identifier": { - Type: schema.TypeString, - Required: true, - }, "type": { - Type: schema.TypeString, - Required: true, - ValidateFunc: validation.StringInSlice(codebuild.ArtifactsType_Values(), false), + Type: schema.TypeString, + Required: true, + ValidateDiagFunc: enum.Validate[types.ArtifactsType](), }, }, }, @@ -455,19 +507,27 @@ func ResourceProject() *schema.Resource { MaxItems: 12, Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ - "buildspec": { - Type: schema.TypeString, + "build_status_config": { + Type: schema.TypeList, Optional: true, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "context": { + Type: schema.TypeString, + Optional: true, + }, + "target_url": { + Type: schema.TypeString, + Optional: true, + }, + }, + }, }, - "location": { + "buildspec": { Type: schema.TypeString, Optional: true, }, - "type": { - Type: schema.TypeString, - Required: true, - ValidateFunc: validation.StringInSlice(codebuild.SourceType_Values(), false), - }, "git_clone_depth": { Type: schema.TypeInt, Optional: true, @@ -490,6 +550,10 @@ func ResourceProject() *schema.Resource { Type: schema.TypeBool, Optional: true, }, + "location": { + Type: schema.TypeString, + Optional: true, + }, "report_build_status": { Type: schema.TypeBool, Optional: true, @@ -498,22 +562,10 @@ func ResourceProject() *schema.Resource { Type: schema.TypeString, Required: true, }, - "build_status_config": { - Type: schema.TypeList, - Optional: true, - MaxItems: 1, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "context": { - Type: schema.TypeString, - Optional: true, - }, - "target_url": { - Type: schema.TypeString, - Optional: true, - }, - }, - }, + "type": { + Type: schema.TypeString, + Required: true, + ValidateDiagFunc: enum.Validate[types.SourceType](), }, }, }, @@ -546,19 +598,27 @@ func ResourceProject() *schema.Resource { Required: true, Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ - "buildspec": { - Type: schema.TypeString, + "build_status_config": { + Type: schema.TypeList, Optional: true, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "context": { + Type: schema.TypeString, + Optional: true, + }, + "target_url": { + Type: schema.TypeString, + Optional: true, + }, + }, + }, }, - "location": { + "buildspec": { Type: schema.TypeString, Optional: true, }, - "type": { - Type: schema.TypeString, - Required: true, - ValidateFunc: validation.StringInSlice(codebuild.SourceType_Values(), false), - }, "git_clone_depth": { Type: schema.TypeInt, Optional: true, @@ -581,26 +641,18 @@ func ResourceProject() *schema.Resource { Type: schema.TypeBool, Optional: true, }, - "report_build_status": { - Type: schema.TypeBool, + "location": { + Type: schema.TypeString, Optional: true, }, - "build_status_config": { - Type: schema.TypeList, + "type": { + Type: schema.TypeString, + Required: true, + ValidateDiagFunc: enum.Validate[types.SourceType](), + }, + "report_build_status": { + Type: schema.TypeBool, Optional: true, - MaxItems: 1, - 
Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "context": { - Type: schema.TypeString, - Optional: true, - }, - "target_url": { - Type: schema.TypeString, - Optional: true, - }, - }, - }, }, }, }, @@ -609,60 +661,6 @@ func ResourceProject() *schema.Resource { Type: schema.TypeString, Optional: true, }, - "project_visibility": { - Type: schema.TypeString, - Optional: true, - Default: codebuild.ProjectVisibilityTypePrivate, - ValidateFunc: validation.StringInSlice(codebuild.ProjectVisibilityType_Values(), false), - }, - "public_project_alias": { - Type: schema.TypeString, - Computed: true, - }, - "resource_access_role": { - Type: schema.TypeString, - Optional: true, - ValidateFunc: verify.ValidARN, - }, - "build_timeout": { - Type: schema.TypeInt, - Optional: true, - Default: 60, - ValidateFunc: validation.IntBetween(5, 480), - DiffSuppressFunc: func(k, old, new string, d *schema.ResourceData) bool { - if d.Get("environment.0.type") == codebuild.EnvironmentTypeArmLambdaContainer { - return true - } - if d.Get("environment.0.type") == codebuild.EnvironmentTypeLinuxLambdaContainer { - return true - } - return false - }, - }, - "queued_timeout": { - Type: schema.TypeInt, - Optional: true, - Default: 480, - ValidateFunc: validation.IntBetween(5, 480), - DiffSuppressFunc: func(k, old, new string, d *schema.ResourceData) bool { - if d.Get("environment.0.type") == codebuild.EnvironmentTypeArmLambdaContainer { - return true - } - if d.Get("environment.0.type") == codebuild.EnvironmentTypeLinuxLambdaContainer { - return true - } - return false - }, - }, - "badge_enabled": { - Type: schema.TypeBool, - Optional: true, - Default: false, - }, - "badge_url": { - Type: schema.TypeString, - Computed: true, - }, names.AttrTags: tftags.TagsSchema(), names.AttrTagsAll: tftags.TagsSchemaComputed(), "vpc_config": { @@ -671,10 +669,6 @@ func ResourceProject() *schema.Resource { MaxItems: 1, Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ - "vpc_id": { - Type: schema.TypeString, - Required: true, - }, "subnets": { Type: schema.TypeSet, Required: true, @@ -687,6 +681,10 @@ func ResourceProject() *schema.Resource { Elem: &schema.Schema{Type: schema.TypeString}, MaxItems: 5, }, + "vpc_id": { + Type: schema.TypeString, + Required: true, + }, }, }, }, @@ -696,7 +694,7 @@ func ResourceProject() *schema.Resource { func(_ context.Context, diff *schema.ResourceDiff, v interface{}) error { // Plan time validation for cache location cacheType, cacheTypeOk := diff.GetOk("cache.0.type") - if !cacheTypeOk || cacheType.(string) == codebuild.CacheTypeNoCache || cacheType.(string) == codebuild.CacheTypeLocal { + if !cacheTypeOk || types.CacheType(cacheType.(string)) == types.CacheTypeNoCache || types.CacheType(cacheType.(string)) == types.CacheTypeLocal { return nil } if v, ok := diff.GetOk("cache.0.location"); ok && v.(string) != "" { @@ -715,46 +713,48 @@ func ResourceProject() *schema.Resource { func resourceProjectCreate(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { var diags diag.Diagnostics - conn := meta.(*conns.AWSClient).CodeBuildConn(ctx) - - projectEnv := expandProjectEnvironment(d) - projectSource := expandProjectSource(d) - projectArtifacts := expandProjectArtifacts(d) - projectSecondaryArtifacts := expandProjectSecondaryArtifacts(d) - projectSecondarySources := expandProjectSecondarySources(d) - projectLogsConfig := expandProjectLogsConfig(d) - projectBatchConfig := expandBuildBatchConfig(d) - projectFileSystemLocations := 
expandProjectFileSystemLocations(d) - - if aws.StringValue(projectSource.Type) == codebuild.SourceTypeNoSource { - if aws.StringValue(projectSource.Buildspec) == "" { + conn := meta.(*conns.AWSClient).CodeBuildClient(ctx) + + var projectSource *types.ProjectSource + if v, ok := d.GetOk("source"); ok && len(v.([]interface{})) > 0 && v.([]interface{})[0] != nil { + projectSource = expandProjectSource(v.([]interface{})[0].(map[string]interface{})) + } + + if projectSource != nil && projectSource.Type == types.SourceTypeNoSource { + if aws.ToString(projectSource.Buildspec) == "" { return sdkdiag.AppendErrorf(diags, "`buildspec` must be set when source's `type` is `NO_SOURCE`") } - if aws.StringValue(projectSource.Location) != "" { + if aws.ToString(projectSource.Location) != "" { return sdkdiag.AppendErrorf(diags, "`location` must be empty when source's `type` is `NO_SOURCE`") } } + name := d.Get("name").(string) input := &codebuild.CreateProjectInput{ - Environment: projectEnv, - Name: aws.String(d.Get("name").(string)), - Source: &projectSource, - Artifacts: &projectArtifacts, - SecondaryArtifacts: projectSecondaryArtifacts, - SecondarySources: projectSecondarySources, - LogsConfig: projectLogsConfig, - BuildBatchConfig: projectBatchConfig, - FileSystemLocations: projectFileSystemLocations, - Tags: getTagsIn(ctx), + Name: aws.String(name), + Source: projectSource, + Tags: getTagsIn(ctx), + } + + if v, ok := d.GetOk("artifacts"); ok && len(v.([]interface{})) > 0 && v.([]interface{})[0] != nil { + input.Artifacts = expandProjectArtifacts(v.([]interface{})[0].(map[string]interface{})) + } + + if v, ok := d.GetOk("badge_enabled"); ok { + input.BadgeEnabled = aws.Bool(v.(bool)) + } + + if v, ok := d.GetOk("build_batch_config"); ok && len(v.([]interface{})) > 0 && v.([]interface{})[0] != nil { + input.BuildBatchConfig = expandProjectBuildBatchConfig(v.([]interface{})[0].(map[string]interface{})) } - if v, ok := d.GetOk("cache"); ok { - input.Cache = expandProjectCache(v.([]interface{})) + if v, ok := d.GetOk("cache"); ok && len(v.([]interface{})) > 0 && v.([]interface{})[0] != nil { + input.Cache = expandProjectCache(v.([]interface{})[0].(map[string]interface{})) } if v, ok := d.GetOk("concurrent_build_limit"); ok { - input.ConcurrentBuildLimit = aws.Int64(int64(v.(int))) + input.ConcurrentBuildLimit = aws.Int32(int32(v.(int))) } if v, ok := d.GetOk("description"); ok { @@ -765,814 +765,849 @@ func resourceProjectCreate(ctx context.Context, d *schema.ResourceData, meta int input.EncryptionKey = aws.String(v.(string)) } - if v, ok := d.GetOk("service_role"); ok { - input.ServiceRole = aws.String(v.(string)) + if v, ok := d.GetOk("environment"); ok && len(v.([]interface{})) > 0 && v.([]interface{})[0] != nil { + input.Environment = expandProjectEnvironment(v.([]interface{})[0].(map[string]interface{})) } - if v, ok := d.GetOk("source_version"); ok { - input.SourceVersion = aws.String(v.(string)) + if v, ok := d.GetOk("file_system_locations"); ok && v.(*schema.Set).Len() > 0 { + input.FileSystemLocations = expandProjectFileSystemLocations(v.(*schema.Set).List()) } - if v, ok := d.GetOk("build_timeout"); ok { - input.TimeoutInMinutes = aws.Int64(int64(v.(int))) + if v, ok := d.GetOk("logs_config"); ok && len(v.([]interface{})) > 0 && v.([]interface{})[0] != nil { + input.LogsConfig = expandProjectLogsConfig(v.([]interface{})[0].(map[string]interface{})) } if v, ok := d.GetOk("queued_timeout"); ok { - input.QueuedTimeoutInMinutes = aws.Int64(int64(v.(int))) + input.QueuedTimeoutInMinutes = 
aws.Int32(int32(v.(int))) } - if v, ok := d.GetOk("vpc_config"); ok { - input.VpcConfig = expandVPCConfig(v.([]interface{})) + if v, ok := d.GetOk("secondary_artifacts"); ok && v.(*schema.Set).Len() > 0 { + input.SecondaryArtifacts = expandProjectSecondaryArtifacts(v.(*schema.Set).List()) } - if v, ok := d.GetOk("badge_enabled"); ok { - input.BadgeEnabled = aws.Bool(v.(bool)) + if v, ok := d.GetOk("secondary_sources"); ok && v.(*schema.Set).Len() > 0 { + input.SecondarySources = expandProjectSecondarySources(v.(*schema.Set).List()) } if v, ok := d.GetOk("secondary_source_version"); ok && v.(*schema.Set).Len() > 0 { - input.SecondarySourceVersions = expandProjectSecondarySourceVersions(v.(*schema.Set)) + input.SecondarySourceVersions = expandProjectSecondarySourceVersions(v.(*schema.Set).List()) } - var resp *codebuild.CreateProjectOutput - // Handle IAM eventual consistency - err := retry.RetryContext(ctx, 5*time.Minute, func() *retry.RetryError { - var err error - - resp, err = conn.CreateProjectWithContext(ctx, input) - if err != nil { - // InvalidInputException: CodeBuild is not authorized to perform - // InvalidInputException: Not authorized to perform DescribeSecurityGroups - if tfawserr.ErrMessageContains(err, codebuild.ErrCodeInvalidInputException, "ot authorized to perform") { - return retry.RetryableError(err) - } + if v, ok := d.GetOk("service_role"); ok { + input.ServiceRole = aws.String(v.(string)) + } - return retry.NonRetryableError(err) - } + if v, ok := d.GetOk("source_version"); ok { + input.SourceVersion = aws.String(v.(string)) + } - return nil - }) + if v, ok := d.GetOk("build_timeout"); ok { + input.TimeoutInMinutes = aws.Int32(int32(v.(int))) + } - if tfresource.TimedOut(err) { - resp, err = conn.CreateProjectWithContext(ctx, input) + if v, ok := d.GetOk("environvpc_configment"); ok && len(v.([]interface{})) > 0 && v.([]interface{})[0] != nil { + input.VpcConfig = expandVPCConfig(v.([]interface{})[0].(map[string]interface{})) } + + // InvalidInputException: CodeBuild is not authorized to perform + // InvalidInputException: Not authorized to perform DescribeSecurityGroups + outputRaw, err := tfresource.RetryWhenIsAErrorMessageContains[*types.InvalidInputException](ctx, propagationTimeout, func() (interface{}, error) { + return conn.CreateProject(ctx, input) + }, "ot authorized to perform") + if err != nil { - return sdkdiag.AppendErrorf(diags, "creating CodeBuild project: %s", err) + return sdkdiag.AppendErrorf(diags, "creating CodeBuild Project (%s): %s", name, err) } - d.SetId(aws.StringValue(resp.Project.Arn)) + d.SetId(aws.ToString(outputRaw.(*codebuild.CreateProjectOutput).Project.Arn)) - if v, ok := d.GetOk("project_visibility"); ok && v.(string) != codebuild.ProjectVisibilityTypePrivate { - visInput := &codebuild.UpdateProjectVisibilityInput{ - ProjectArn: aws.String(d.Id()), - ProjectVisibility: aws.String(v.(string)), - } + if v, ok := d.GetOk("project_visibility"); ok { + if v := types.ProjectVisibilityType(v.(string)); v != types.ProjectVisibilityTypePrivate { + input := &codebuild.UpdateProjectVisibilityInput{ + ProjectArn: aws.String(d.Id()), + ProjectVisibility: v, + } - if v, ok := d.GetOk("resource_access_role"); ok { - visInput.ResourceAccessRole = aws.String(v.(string)) - } + if v, ok := d.GetOk("resource_access_role"); ok { + input.ResourceAccessRole = aws.String(v.(string)) + } - _, err = conn.UpdateProjectVisibilityWithContext(ctx, visInput) - if err != nil { - return sdkdiag.AppendErrorf(diags, "updating CodeBuild project (%s) visibility: %s", 
d.Id(), err) + _, err = conn.UpdateProjectVisibility(ctx, input) + + if err != nil { + return sdkdiag.AppendErrorf(diags, "updating CodeBuild Project (%s) visibility: %s", d.Id(), err) + } } } + return append(diags, resourceProjectRead(ctx, d, meta)...) } -func expandProjectSecondarySourceVersions(ssv *schema.Set) []*codebuild.ProjectSourceVersion { - sourceVersions := make([]*codebuild.ProjectSourceVersion, 0) +func resourceProjectRead(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { + var diags diag.Diagnostics + conn := meta.(*conns.AWSClient).CodeBuildClient(ctx) - rawSourceVersions := ssv.List() - if len(rawSourceVersions) == 0 { - return nil - } + project, err := findProjectByNameOrARN(ctx, conn, d.Id()) - for _, config := range rawSourceVersions { - sourceVersion := expandProjectSourceVersion(config.(map[string]interface{})) - sourceVersions = append(sourceVersions, &sourceVersion) + if !d.IsNewResource() && tfresource.NotFound(err) { + log.Printf("[WARN] CodeBuild Project (%s) not found, removing from state", d.Id()) + d.SetId("") + return diags } - return sourceVersions -} + if err != nil { + return sdkdiag.AppendErrorf(diags, "reading CodeBuild Project (%s): %s", d.Id(), err) + } -func expandProjectSourceVersion(data map[string]interface{}) codebuild.ProjectSourceVersion { - sourceVersion := codebuild.ProjectSourceVersion{ - SourceIdentifier: aws.String(data["source_identifier"].(string)), - SourceVersion: aws.String(data["source_version"].(string)), + d.Set("arn", project.Arn) + if project.Artifacts != nil { + if err := d.Set("artifacts", []interface{}{flattenProjectArtifacts(*project.Artifacts)}); err != nil { + return sdkdiag.AppendErrorf(diags, "setting artifacts: %s", err) + } + } else { + d.Set("artifacts", nil) + } + if project.Badge != nil { + d.Set("badge_enabled", project.Badge.BadgeEnabled) + d.Set("badge_url", project.Badge.BadgeRequestUrl) + } else { + d.Set("badge_enabled", false) + d.Set("badge_url", "") + } + if err := d.Set("build_batch_config", flattenBuildBatchConfig(project.BuildBatchConfig)); err != nil { + return sdkdiag.AppendErrorf(diags, "setting build_batch_config: %s", err) + } + d.Set("build_timeout", project.TimeoutInMinutes) + if err := d.Set("cache", flattenProjectCache(project.Cache)); err != nil { + return sdkdiag.AppendErrorf(diags, "setting cache: %s", err) + } + d.Set("concurrent_build_limit", project.ConcurrentBuildLimit) + d.Set("description", project.Description) + d.Set("encryption_key", project.EncryptionKey) + if err := d.Set("environment", flattenProjectEnvironment(project.Environment)); err != nil { + return sdkdiag.AppendErrorf(diags, "setting environment: %s", err) + } + if err := d.Set("file_system_locations", flattenProjectFileSystemLocations(project.FileSystemLocations)); err != nil { + return sdkdiag.AppendErrorf(diags, "setting file_system_locations: %s", err) + } + if err := d.Set("logs_config", flattenLogsConfig(project.LogsConfig)); err != nil { + return sdkdiag.AppendErrorf(diags, "setting logs_config: %s", err) + } + d.Set("name", project.Name) + d.Set("project_visibility", project.ProjectVisibility) + d.Set("public_project_alias", project.PublicProjectAlias) + d.Set("resource_access_role", project.ResourceAccessRole) + d.Set("queued_timeout", project.QueuedTimeoutInMinutes) + if err := d.Set("secondary_artifacts", flattenProjectSecondaryArtifacts(project.SecondaryArtifacts)); err != nil { + return sdkdiag.AppendErrorf(diags, "setting secondary_artifacts: %s", err) + } + if err := 
d.Set("secondary_sources", flattenProjectSecondarySources(project.SecondarySources)); err != nil { + return sdkdiag.AppendErrorf(diags, "setting secondary_sources: %s", err) + } + if err := d.Set("secondary_source_version", flattenProjectSecondarySourceVersions(project.SecondarySourceVersions)); err != nil { + return sdkdiag.AppendErrorf(diags, "setting secondary_source_version: %s", err) + } + d.Set("service_role", project.ServiceRole) + if project.Source != nil { + if err := d.Set("source", []interface{}{flattenProjectSource(*project.Source)}); err != nil { + return sdkdiag.AppendErrorf(diags, "setting source: %s", err) + } + } else { + d.Set("source", nil) + } + d.Set("source_version", project.SourceVersion) + if err := d.Set("vpc_config", flattenVPCConfig(project.VpcConfig)); err != nil { + return sdkdiag.AppendErrorf(diags, "setting vpc_config: %s", err) } - return sourceVersion -} + setTagsOut(ctx, project.Tags) -func expandProjectFileSystemLocations(d *schema.ResourceData) []*codebuild.ProjectFileSystemLocation { - fileSystemLocations := make([]*codebuild.ProjectFileSystemLocation, 0) + return diags +} - configsList := d.Get("file_system_locations").(*schema.Set).List() +func resourceProjectUpdate(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { + var diags diag.Diagnostics + conn := meta.(*conns.AWSClient).CodeBuildClient(ctx) - if len(configsList) == 0 { - return nil - } + if d.HasChanges("project_visibility", "resource_access_role") { + input := &codebuild.UpdateProjectVisibilityInput{ + ProjectArn: aws.String(d.Id()), + ProjectVisibility: types.ProjectVisibilityType(d.Get("project_visibility").(string)), + } - for _, config := range configsList { - art := expandProjectFileSystemLocation(config.(map[string]interface{})) - fileSystemLocations = append(fileSystemLocations, &art) - } + if v, ok := d.GetOk("resource_access_role"); ok { + input.ResourceAccessRole = aws.String(v.(string)) + } - return fileSystemLocations -} + _, err := conn.UpdateProjectVisibility(ctx, input) -func expandProjectFileSystemLocation(data map[string]interface{}) codebuild.ProjectFileSystemLocation { - projectFileSystemLocation := codebuild.ProjectFileSystemLocation{ - Type: aws.String(data["type"].(string)), + if err != nil { + return sdkdiag.AppendErrorf(diags, "updating CodeBuild Project (%s) visibility: %s", d.Id(), err) + } } - if data["identifier"].(string) != "" { - projectFileSystemLocation.Identifier = aws.String(data["identifier"].(string)) - } + if d.HasChangesExcept("project_visibility", "resource_access_role") { + input := &codebuild.UpdateProjectInput{ + Name: aws.String(d.Get("name").(string)), + } - if data["location"].(string) != "" { - projectFileSystemLocation.Location = aws.String(data["location"].(string)) - } + if d.HasChange("artifacts") { + if v, ok := d.GetOk("artifacts"); ok && len(v.([]interface{})) > 0 && v.([]interface{})[0] != nil { + input.Artifacts = expandProjectArtifacts(v.([]interface{})[0].(map[string]interface{})) + } + } - if data["mount_options"].(string) != "" { - projectFileSystemLocation.MountOptions = aws.String(data["mount_options"].(string)) - } + if d.HasChange("badge_enabled") { + input.BadgeEnabled = aws.Bool(d.Get("badge_enabled").(bool)) + } - if data["mount_point"].(string) != "" { - projectFileSystemLocation.MountPoint = aws.String(data["mount_point"].(string)) - } + if d.HasChange("build_batch_config") { + if v, ok := d.GetOk("build_batch_config"); ok && len(v.([]interface{})) > 0 && v.([]interface{})[0] != nil { + 
input.BuildBatchConfig = expandProjectBuildBatchConfig(v.([]interface{})[0].(map[string]interface{})) + } else { + input.BuildBatchConfig = &types.ProjectBuildBatchConfig{} + } + } - return projectFileSystemLocation -} + if d.HasChange("cache") { + if v, ok := d.GetOk("cache"); ok && len(v.([]interface{})) > 0 && v.([]interface{})[0] != nil { + input.Cache = expandProjectCache(v.([]interface{})[0].(map[string]interface{})) + } else { + input.Cache = &types.ProjectCache{ + Type: types.CacheTypeNoCache, + } + } + } -func expandProjectSecondaryArtifacts(d *schema.ResourceData) []*codebuild.ProjectArtifacts { - artifacts := make([]*codebuild.ProjectArtifacts, 0) + if d.HasChange("concurrent_build_limit") { + input.ConcurrentBuildLimit = aws.Int32(int32(d.Get("concurrent_build_limit").(int))) + } - configsList := d.Get("secondary_artifacts").(*schema.Set).List() + if d.HasChange("description") { + input.Description = aws.String(d.Get("description").(string)) + } - if len(configsList) == 0 { - return nil - } + if d.HasChange("encryption_key") { + input.EncryptionKey = aws.String(d.Get("encryption_key").(string)) + } - for _, config := range configsList { - art := expandProjectArtifactData(config.(map[string]interface{})) - artifacts = append(artifacts, &art) - } + if d.HasChange("environment") { + if v, ok := d.GetOk("environment"); ok && len(v.([]interface{})) > 0 && v.([]interface{})[0] != nil { + input.Environment = expandProjectEnvironment(v.([]interface{})[0].(map[string]interface{})) + } + } - return artifacts -} + if d.HasChange("file_system_locations") { + if v, ok := d.GetOk("file_system_locations"); ok && v.(*schema.Set).Len() > 0 { + input.FileSystemLocations = expandProjectFileSystemLocations(v.(*schema.Set).List()) + } + } -func expandProjectArtifacts(d *schema.ResourceData) codebuild.ProjectArtifacts { - configs := d.Get("artifacts").([]interface{}) - data := configs[0].(map[string]interface{}) + if d.HasChange("logs_config") { + if v, ok := d.GetOk("logs_config"); ok && len(v.([]interface{})) > 0 && v.([]interface{})[0] != nil { + input.LogsConfig = expandProjectLogsConfig(v.([]interface{})[0].(map[string]interface{})) + } + } - return expandProjectArtifactData(data) -} + if d.HasChange("queued_timeout") { + input.QueuedTimeoutInMinutes = aws.Int32(int32(d.Get("queued_timeout").(int))) + } -func expandProjectArtifactData(data map[string]interface{}) codebuild.ProjectArtifacts { - artifactType := data["type"].(string) + if d.HasChange("secondary_artifacts") { + if v, ok := d.GetOk("secondary_artifacts"); ok && v.(*schema.Set).Len() > 0 { + input.SecondaryArtifacts = expandProjectSecondaryArtifacts(v.(*schema.Set).List()) + } else { + input.SecondaryArtifacts = []types.ProjectArtifacts{} + } + } - projectArtifacts := codebuild.ProjectArtifacts{ - Type: aws.String(artifactType), - } + if d.HasChange("secondary_sources") { + if v, ok := d.GetOk("secondary_sources"); ok && v.(*schema.Set).Len() > 0 { + input.SecondarySources = expandProjectSecondarySources(v.(*schema.Set).List()) + } else { + input.SecondarySources = []types.ProjectSource{} + } + } - // Only valid for S3 and CODEPIPELINE artifacts types - // InvalidInputException: Invalid artifacts: artifact type NO_ARTIFACTS should have null encryptionDisabled - if artifactType == codebuild.ArtifactsTypeS3 || artifactType == codebuild.ArtifactsTypeCodepipeline { - projectArtifacts.EncryptionDisabled = aws.Bool(data["encryption_disabled"].(bool)) - } + if d.HasChange("secondary_source_version") { + if v, ok := 
d.GetOk("secondary_source_version"); ok && v.(*schema.Set).Len() > 0 { + input.SecondarySourceVersions = expandProjectSecondarySourceVersions(v.(*schema.Set).List()) + } else { + input.SecondarySourceVersions = []types.ProjectSourceVersion{} + } + } - if v, ok := data["artifact_identifier"].(string); ok && v != "" { - projectArtifacts.ArtifactIdentifier = aws.String(v) - } + if d.HasChange("service_role") { + input.ServiceRole = aws.String(d.Get("service_role").(string)) + } - if v, ok := data["location"].(string); ok && v != "" { - projectArtifacts.Location = aws.String(v) - } + if d.HasChange("source") { + if v, ok := d.GetOk("source"); ok && len(v.([]interface{})) > 0 && v.([]interface{})[0] != nil { + input.Source = expandProjectSource(v.([]interface{})[0].(map[string]interface{})) + } + } - if v, ok := data["name"].(string); ok && v != "" { - projectArtifacts.Name = aws.String(v) - } + if d.HasChange("source_version") { + input.SourceVersion = aws.String(d.Get("source_version").(string)) + } - if v, ok := data["namespace_type"].(string); ok && v != "" { - projectArtifacts.NamespaceType = aws.String(v) - } + if d.HasChange("build_timeout") { + input.TimeoutInMinutes = aws.Int32(int32(d.Get("build_timeout").(int))) + } - if v, ok := data["override_artifact_name"]; ok { - projectArtifacts.OverrideArtifactName = aws.Bool(v.(bool)) - } + if d.HasChange("vpc_config") { + if v, ok := d.GetOk("environvpc_configment"); ok && len(v.([]interface{})) > 0 && v.([]interface{})[0] != nil { + input.VpcConfig = expandVPCConfig(v.([]interface{})[0].(map[string]interface{})) + } + } - if v, ok := data["packaging"].(string); ok && v != "" { - projectArtifacts.Packaging = aws.String(v) - } + // The documentation clearly says "The replacement set of tags for this build project." + // But its a slice of pointers so if not set for every update, they get removed. + input.Tags = getTagsIn(ctx) - if v, ok := data["path"].(string); ok && v != "" { - projectArtifacts.Path = aws.String(v) - } + _, err := tfresource.RetryWhenIsAErrorMessageContains[*types.InvalidInputException](ctx, propagationTimeout, func() (interface{}, error) { + return conn.UpdateProject(ctx, input) + }, "ot authorized to perform") - if v, ok := data["bucket_owner_access"].(string); ok && v != "" { - projectArtifacts.BucketOwnerAccess = aws.String(v) + if err != nil { + return sdkdiag.AppendErrorf(diags, "updating CodeBuild Project (%s): %s", d.Id(), err) + } } - return projectArtifacts + return append(diags, resourceProjectRead(ctx, d, meta)...) 
} -func expandProjectCache(s []interface{}) *codebuild.ProjectCache { - var projectCache *codebuild.ProjectCache +func resourceProjectDelete(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { + var diags diag.Diagnostics + conn := meta.(*conns.AWSClient).CodeBuildClient(ctx) - data := s[0].(map[string]interface{}) + log.Printf("[INFO] Deleting CodeBuild Project: %s", d.Id()) + _, err := conn.DeleteProject(ctx, &codebuild.DeleteProjectInput{ + Name: aws.String(d.Id()), + }) - projectCache = &codebuild.ProjectCache{ - Type: aws.String(data["type"].(string)), + if err != nil { + return sdkdiag.AppendErrorf(diags, "deleting CodeBuild Project (%s): %s", d.Id(), err) } - if v, ok := data["location"]; ok { - projectCache.Location = aws.String(v.(string)) - } + return diags +} - if cacheType := data["type"]; cacheType == codebuild.CacheTypeLocal { - if modes, modesOk := data["modes"]; modesOk { - modesStrings := modes.([]interface{}) - projectCache.Modes = flex.ExpandStringList(modesStrings) - } +func findProjectByNameOrARN(ctx context.Context, conn *codebuild.Client, nameOrARN string) (*types.Project, error) { + input := &codebuild.BatchGetProjectsInput{ + Names: tfslices.Of(nameOrARN), } - return projectCache + return findProject(ctx, conn, input) } -func expandProjectEnvironment(d *schema.ResourceData) *codebuild.ProjectEnvironment { - configs := d.Get("environment").([]interface{}) +func findProject(ctx context.Context, conn *codebuild.Client, input *codebuild.BatchGetProjectsInput) (*types.Project, error) { + output, err := findProjects(ctx, conn, input) - envConfig := configs[0].(map[string]interface{}) - - projectEnv := &codebuild.ProjectEnvironment{ - PrivilegedMode: aws.Bool(envConfig["privileged_mode"].(bool)), + if err != nil { + return nil, err } - if v := envConfig["compute_type"]; v != nil { - projectEnv.ComputeType = aws.String(v.(string)) - } + return tfresource.AssertSingleValueResult(output) +} - if v := envConfig["image"]; v != nil { - projectEnv.Image = aws.String(v.(string)) - } +func findProjects(ctx context.Context, conn *codebuild.Client, input *codebuild.BatchGetProjectsInput) ([]types.Project, error) { + output, err := conn.BatchGetProjects(ctx, input) - if v := envConfig["type"]; v != nil { - projectEnv.Type = aws.String(v.(string)) + if err != nil { + return nil, err } - if v, ok := envConfig["certificate"]; ok && v.(string) != "" { - projectEnv.Certificate = aws.String(v.(string)) + if output == nil { + return nil, tfresource.NewEmptyResultError(input) } - if v := envConfig["image_pull_credentials_type"]; v != nil { - projectEnv.ImagePullCredentialsType = aws.String(v.(string)) - } + return output.Projects, nil +} - if v, ok := envConfig["registry_credential"]; ok && len(v.([]interface{})) > 0 { - config := v.([]interface{})[0].(map[string]interface{}) +func expandProjectSecondarySourceVersions(tfList []interface{}) []types.ProjectSourceVersion { + if len(tfList) == 0 { + return nil + } - projectRegistryCredential := &codebuild.RegistryCredential{} + apiObjects := make([]types.ProjectSourceVersion, 0) - if v, ok := config["credential"]; ok && v.(string) != "" { - projectRegistryCredential.Credential = aws.String(v.(string)) + for _, tfMapRaw := range tfList { + tfMap, ok := tfMapRaw.(map[string]interface{}) + if !ok { + continue } - if v, ok := config["credential_provider"]; ok && v.(string) != "" { - projectRegistryCredential.CredentialProvider = aws.String(v.(string)) + apiObject := expandProjectSourceVersion(tfMap) + + if apiObject == 
nil { + continue } - projectEnv.RegistryCredential = projectRegistryCredential + apiObjects = append(apiObjects, *apiObject) } - if v := envConfig["environment_variable"]; v != nil { - envVariables := v.([]interface{}) - if len(envVariables) > 0 { - projectEnvironmentVariables := make([]*codebuild.EnvironmentVariable, 0, len(envVariables)) - - for _, envVariablesConfig := range envVariables { - config := envVariablesConfig.(map[string]interface{}) - - projectEnvironmentVar := &codebuild.EnvironmentVariable{} - - if v := config["name"].(string); v != "" { - projectEnvironmentVar.Name = &v - } - - if v, ok := config["value"].(string); ok { - projectEnvironmentVar.Value = &v - } - - if v := config["type"].(string); v != "" { - projectEnvironmentVar.Type = &v - } + return apiObjects +} - projectEnvironmentVariables = append(projectEnvironmentVariables, projectEnvironmentVar) - } +func expandProjectSourceVersion(tfMap map[string]interface{}) *types.ProjectSourceVersion { + if tfMap == nil { + return nil + } - projectEnv.EnvironmentVariables = projectEnvironmentVariables - } + apiObject := &types.ProjectSourceVersion{ + SourceIdentifier: aws.String(tfMap["source_identifier"].(string)), + SourceVersion: aws.String(tfMap["source_version"].(string)), } - return projectEnv + return apiObject } -func expandProjectLogsConfig(d *schema.ResourceData) *codebuild.LogsConfig { - logsConfig := &codebuild.LogsConfig{} +func expandProjectFileSystemLocations(tfList []interface{}) []types.ProjectFileSystemLocation { + if len(tfList) == 0 { + return nil + } - if v, ok := d.GetOk("logs_config"); ok && len(v.([]interface{})) > 0 && v.([]interface{})[0] != nil { - configList := v.([]interface{}) - data := configList[0].(map[string]interface{}) + apiObjects := make([]types.ProjectFileSystemLocation, 0) - if v, ok := data["cloudwatch_logs"]; ok { - logsConfig.CloudWatchLogs = expandCloudWatchLogsConfig(v.([]interface{})) + for _, tfMapRaw := range tfList { + tfMap, ok := tfMapRaw.(map[string]interface{}) + if !ok { + continue } - if v, ok := data["s3_logs"]; ok { - logsConfig.S3Logs = expandS3LogsConfig(v.([]interface{})) - } - } + apiObject := expandProjectFileSystemLocation(tfMap) - if logsConfig.CloudWatchLogs == nil { - logsConfig.CloudWatchLogs = &codebuild.CloudWatchLogsConfig{ - Status: aws.String(codebuild.LogsConfigStatusTypeEnabled), + if apiObject == nil { + continue } - } - if logsConfig.S3Logs == nil { - logsConfig.S3Logs = &codebuild.S3LogsConfig{ - Status: aws.String(codebuild.LogsConfigStatusTypeDisabled), - } + apiObjects = append(apiObjects, *apiObject) } - return logsConfig + return apiObjects } -func expandBuildBatchConfig(d *schema.ResourceData) *codebuild.ProjectBuildBatchConfig { - configs, ok := d.Get("build_batch_config").([]interface{}) - if !ok || len(configs) == 0 || configs[0] == nil { +func expandProjectFileSystemLocation(tfMap map[string]interface{}) *types.ProjectFileSystemLocation { + if tfMap == nil { return nil } - data := configs[0].(map[string]interface{}) - - projectBuildBatchConfig := &codebuild.ProjectBuildBatchConfig{ - Restrictions: expandBatchRestrictions(data), - ServiceRole: aws.String(data["service_role"].(string)), + apiObject := &types.ProjectFileSystemLocation{ + Type: types.FileSystemType(tfMap["type"].(string)), } - if v, ok := data["combine_artifacts"]; ok { - projectBuildBatchConfig.CombineArtifacts = aws.Bool(v.(bool)) + if tfMap["identifier"].(string) != "" { + apiObject.Identifier = aws.String(tfMap["identifier"].(string)) } - if v, ok := 
data["timeout_in_mins"]; ok && v != 0 { - projectBuildBatchConfig.TimeoutInMins = aws.Int64(int64(v.(int))) + if tfMap["location"].(string) != "" { + apiObject.Location = aws.String(tfMap["location"].(string)) } - return projectBuildBatchConfig -} - -func expandBatchRestrictions(data map[string]interface{}) *codebuild.BatchRestrictions { - if v, ok := data["restrictions"]; !ok || len(v.([]interface{})) == 0 || v.([]interface{})[0] == nil { - return nil + if tfMap["mount_options"].(string) != "" { + apiObject.MountOptions = aws.String(tfMap["mount_options"].(string)) } - restrictionsData := data["restrictions"].([]interface{})[0].(map[string]interface{}) - - restrictions := &codebuild.BatchRestrictions{} - if v, ok := restrictionsData["compute_types_allowed"]; ok && len(v.([]interface{})) != 0 { - restrictions.ComputeTypesAllowed = flex.ExpandStringList(v.([]interface{})) + if tfMap["mount_point"].(string) != "" { + apiObject.MountPoint = aws.String(tfMap["mount_point"].(string)) } - if v, ok := restrictionsData["maximum_builds_allowed"]; ok && v != 0 { - restrictions.MaximumBuildsAllowed = aws.Int64(int64(v.(int))) - } - - return restrictions + return apiObject } -func expandCloudWatchLogsConfig(configList []interface{}) *codebuild.CloudWatchLogsConfig { - if len(configList) == 0 || configList[0] == nil { +func expandProjectSecondaryArtifacts(tfList []interface{}) []types.ProjectArtifacts { + if len(tfList) == 0 { return nil } - data := configList[0].(map[string]interface{}) + apiObjects := make([]types.ProjectArtifacts, 0) - status := data["status"].(string) + for _, tfMapRaw := range tfList { + tfMap, ok := tfMapRaw.(map[string]interface{}) + if !ok { + continue + } - cloudWatchLogsConfig := &codebuild.CloudWatchLogsConfig{ - Status: aws.String(status), - } + apiObject := expandProjectArtifacts(tfMap) - if v, ok := data["group_name"]; ok { - groupName := v.(string) - if len(groupName) > 0 { - cloudWatchLogsConfig.GroupName = aws.String(groupName) + if apiObject == nil { + continue } - } - if v, ok := data["stream_name"]; ok { - streamName := v.(string) - if len(streamName) > 0 { - cloudWatchLogsConfig.StreamName = aws.String(streamName) - } + apiObjects = append(apiObjects, *apiObject) } - return cloudWatchLogsConfig + return apiObjects } -func expandS3LogsConfig(configList []interface{}) *codebuild.S3LogsConfig { - if len(configList) == 0 || configList[0] == nil { +func expandProjectArtifacts(tfMap map[string]interface{}) *types.ProjectArtifacts { + if tfMap == nil { return nil } - data := configList[0].(map[string]interface{}) + artifactType := types.ArtifactsType(tfMap["type"].(string)) + apiObject := &types.ProjectArtifacts{ + Type: artifactType, + } - status := data["status"].(string) + // Only valid for S3 and CODEPIPELINE artifacts types + // InvalidInputException: Invalid artifacts: artifact type NO_ARTIFACTS should have null encryptionDisabled + if artifactType == types.ArtifactsTypeS3 || artifactType == types.ArtifactsTypeCodepipeline { + apiObject.EncryptionDisabled = aws.Bool(tfMap["encryption_disabled"].(bool)) + } - s3LogsConfig := &codebuild.S3LogsConfig{ - Status: aws.String(status), + if v, ok := tfMap["artifact_identifier"].(string); ok && v != "" { + apiObject.ArtifactIdentifier = aws.String(v) } - if v, ok := data["location"].(string); ok && v != "" { - s3LogsConfig.Location = aws.String(v) + if v, ok := tfMap["bucket_owner_access"].(string); ok && v != "" { + apiObject.BucketOwnerAccess = types.BucketOwnerAccess(v) } - if v, ok := 
data["bucket_owner_access"].(string); ok && v != "" { - s3LogsConfig.BucketOwnerAccess = aws.String(v) + if v, ok := tfMap["location"].(string); ok && v != "" { + apiObject.Location = aws.String(v) } - s3LogsConfig.EncryptionDisabled = aws.Bool(data["encryption_disabled"].(bool)) + if v, ok := tfMap["name"].(string); ok && v != "" { + apiObject.Name = aws.String(v) + } - return s3LogsConfig -} + if v, ok := tfMap["namespace_type"].(string); ok && v != "" { + apiObject.NamespaceType = types.ArtifactNamespace(v) + } -func expandVPCConfig(rawVpcConfig []interface{}) *codebuild.VpcConfig { - vpcConfig := codebuild.VpcConfig{} - if len(rawVpcConfig) == 0 || rawVpcConfig[0] == nil { - return &vpcConfig + if v, ok := tfMap["override_artifact_name"]; ok { + apiObject.OverrideArtifactName = aws.Bool(v.(bool)) } - data := rawVpcConfig[0].(map[string]interface{}) - vpcConfig.VpcId = aws.String(data["vpc_id"].(string)) - vpcConfig.Subnets = flex.ExpandStringSet(data["subnets"].(*schema.Set)) - vpcConfig.SecurityGroupIds = flex.ExpandStringSet(data["security_group_ids"].(*schema.Set)) + if v, ok := tfMap["packaging"].(string); ok && v != "" { + apiObject.Packaging = types.ArtifactPackaging(v) + } - return &vpcConfig -} + if v, ok := tfMap["path"].(string); ok && v != "" { + apiObject.Path = aws.String(v) + } -func expandProjectSecondarySources(d *schema.ResourceData) []*codebuild.ProjectSource { - configs := d.Get("secondary_sources").(*schema.Set).List() + return apiObject +} - if len(configs) == 0 { +func expandProjectCache(tfMap map[string]interface{}) *types.ProjectCache { + if tfMap == nil { return nil } - sources := make([]*codebuild.ProjectSource, 0) + cacheType := types.CacheType(tfMap["type"].(string)) + apiObject := &types.ProjectCache{ + Type: cacheType, + } - for _, config := range configs { - source := expandProjectSourceData(config.(map[string]interface{})) - sources = append(sources, &source) + if v, ok := tfMap["location"]; ok { + apiObject.Location = aws.String(v.(string)) } - return sources + if cacheType == types.CacheTypeLocal { + if v, ok := tfMap["modes"].([]interface{}); ok && len(v) > 0 { + apiObject.Modes = flex.ExpandStringyValueList[types.CacheMode](v) + } + } + + return apiObject } -func expandProjectSource(d *schema.ResourceData) codebuild.ProjectSource { - configs := d.Get("source").([]interface{}) +func expandProjectEnvironment(tfMap map[string]interface{}) *types.ProjectEnvironment { + if tfMap == nil { + return nil + } - data := configs[0].(map[string]interface{}) - return expandProjectSourceData(data) -} + apiObject := &types.ProjectEnvironment{ + PrivilegedMode: aws.Bool(tfMap["privileged_mode"].(bool)), + } -func expandProjectSourceData(data map[string]interface{}) codebuild.ProjectSource { - sourceType := data["type"].(string) + if v, ok := tfMap["certificate"].(string); ok && v != "" { + apiObject.Certificate = aws.String(v) + } - projectSource := codebuild.ProjectSource{ - Buildspec: aws.String(data["buildspec"].(string)), - GitCloneDepth: aws.Int64(int64(data["git_clone_depth"].(int))), - InsecureSsl: aws.Bool(data["insecure_ssl"].(bool)), - Type: aws.String(sourceType), + if v, ok := tfMap["compute_type"].(string); ok && v != "" { + apiObject.ComputeType = types.ComputeType(v) } - if data["source_identifier"] != nil { - projectSource.SourceIdentifier = aws.String(data["source_identifier"].(string)) + if v, ok := tfMap["image"].(string); ok && v != "" { + apiObject.Image = aws.String(v) } - if data["location"].(string) != "" { - projectSource.Location = 
aws.String(data["location"].(string)) + if v, ok := tfMap["image_pull_credentials_type"].(string); ok && v != "" { + apiObject.ImagePullCredentialsType = types.ImagePullCredentialsType(v) } - // Only valid for BITBUCKET, GITHUB, and GITHUB_ENTERPRISE source types, e.g. - // InvalidInputException: Source type NO_SOURCE does not support ReportBuildStatus - if sourceType == codebuild.SourceTypeBitbucket || sourceType == codebuild.SourceTypeGithub || sourceType == codebuild.SourceTypeGithubEnterprise { - projectSource.ReportBuildStatus = aws.Bool(data["report_build_status"].(bool)) + if v, ok := tfMap["type"].(string); ok && v != "" { + apiObject.Type = types.EnvironmentType(v) } - // Only valid for CODECOMMIT, GITHUB, GITHUB_ENTERPRISE, BITBUCKET source types. - if sourceType == codebuild.SourceTypeCodecommit || sourceType == codebuild.SourceTypeGithub || sourceType == codebuild.SourceTypeGithubEnterprise || sourceType == codebuild.SourceTypeBitbucket { - if v, ok := data["git_submodules_config"]; ok && len(v.([]interface{})) > 0 { - config := v.([]interface{})[0].(map[string]interface{}) + if v, ok := tfMap["registry_credential"].([]interface{}); ok && len(v) > 0 && v[0] != nil { + tfMap := v[0].(map[string]interface{}) - gitSubmodulesConfig := &codebuild.GitSubmodulesConfig{} + projectRegistryCredential := &types.RegistryCredential{} - if v, ok := config["fetch_submodules"]; ok { - gitSubmodulesConfig.FetchSubmodules = aws.Bool(v.(bool)) - } + if v, ok := tfMap["credential"]; ok && v.(string) != "" { + projectRegistryCredential.Credential = aws.String(v.(string)) + } - projectSource.GitSubmodulesConfig = gitSubmodulesConfig + if v, ok := tfMap["credential_provider"]; ok && v.(string) != "" { + projectRegistryCredential.CredentialProvider = types.CredentialProviderType(v.(string)) } + + apiObject.RegistryCredential = projectRegistryCredential } - // Only valid for BITBUCKET, GITHUB, GITHUB_ENTERPRISE source types. 
- if sourceType == codebuild.SourceTypeBitbucket || sourceType == codebuild.SourceTypeGithub || sourceType == codebuild.SourceTypeGithubEnterprise { - if v, ok := data["build_status_config"]; ok && len(v.([]interface{})) > 0 { - config := v.([]interface{})[0].(map[string]interface{}) + if v, ok := tfMap["environment_variable"].([]interface{}); ok && len(v) > 0 { + projectEnvironmentVariables := make([]types.EnvironmentVariable, 0) + + for _, tfMapRaw := range v { + tfMap, ok := tfMapRaw.(map[string]interface{}) + if !ok { + continue + } + + projectEnvironmentVar := types.EnvironmentVariable{} - buildStatusConfig := &codebuild.BuildStatusConfig{} + if v := tfMap["name"].(string); v != "" { + projectEnvironmentVar.Name = aws.String(v) + } - if v, ok := config["context"]; ok { - buildStatusConfig.Context = aws.String(v.(string)) + if v := tfMap["type"].(string); v != "" { + projectEnvironmentVar.Type = types.EnvironmentVariableType(v) } - if v, ok := config["target_url"]; ok { - buildStatusConfig.TargetUrl = aws.String(v.(string)) + + if v, ok := tfMap["value"].(string); ok { + projectEnvironmentVar.Value = aws.String(v) } - projectSource.BuildStatusConfig = buildStatusConfig + projectEnvironmentVariables = append(projectEnvironmentVariables, projectEnvironmentVar) } + + apiObject.EnvironmentVariables = projectEnvironmentVariables } - return projectSource + return apiObject } -func resourceProjectRead(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - var diags diag.Diagnostics - conn := meta.(*conns.AWSClient).CodeBuildConn(ctx) +func expandProjectLogsConfig(tfMap map[string]interface{}) *types.LogsConfig { + if tfMap == nil { + return nil + } - project, err := FindProjectByARN(ctx, conn, d.Id()) + apiObject := &types.LogsConfig{} - if !d.IsNewResource() && tfresource.NotFound(err) { - log.Printf("[WARN] CodeBuild Project (%s) not found, removing from state", d.Id()) - d.SetId("") - return diags + if v, ok := tfMap["cloudwatch_logs"].([]interface{}); ok && len(v) > 0 && v[0] != nil { + apiObject.CloudWatchLogs = expandCloudWatchLogsConfig(v[0].(map[string]interface{})) } - if err != nil { - return sdkdiag.AppendErrorf(diags, "Listing CodeBuild Projects: %s", err) + if v, ok := tfMap["s3_logs"].([]interface{}); ok && len(v) > 0 && v[0] != nil { + apiObject.S3Logs = expandS3LogsConfig(v[0].(map[string]interface{})) } - if err := d.Set("artifacts", flattenProjectArtifacts(project.Artifacts)); err != nil { - return sdkdiag.AppendErrorf(diags, "setting artifacts: %s", err) + if apiObject.CloudWatchLogs == nil { + apiObject.CloudWatchLogs = &types.CloudWatchLogsConfig{ + Status: types.LogsConfigStatusTypeEnabled, + } } - if err := d.Set("environment", flattenProjectEnvironment(project.Environment)); err != nil { - return sdkdiag.AppendErrorf(diags, "setting environment: %s", err) + if apiObject.S3Logs == nil { + apiObject.S3Logs = &types.S3LogsConfig{ + Status: types.LogsConfigStatusTypeDisabled, + } } - if err := d.Set("file_system_locations", flattenProjectFileSystemLocations(project.FileSystemLocations)); err != nil { - return sdkdiag.AppendErrorf(diags, "setting file_system_locations: %s", err) - } + return apiObject +} - if err := d.Set("cache", flattenProjectCache(project.Cache)); err != nil { - return sdkdiag.AppendErrorf(diags, "setting cache: %s", err) +func expandCloudWatchLogsConfig(tfMap map[string]interface{}) *types.CloudWatchLogsConfig { + if tfMap == nil { + return nil } - if err := d.Set("logs_config", flattenLogsConfig(project.LogsConfig)); err != 
nil { - return sdkdiag.AppendErrorf(diags, "setting logs_config: %s", err) + apiObject := &types.CloudWatchLogsConfig{ + Status: types.LogsConfigStatusType(tfMap["status"].(string)), } - if err := d.Set("secondary_artifacts", flattenProjectSecondaryArtifacts(project.SecondaryArtifacts)); err != nil { - return sdkdiag.AppendErrorf(diags, "setting secondary_artifacts: %s", err) + if v, ok := tfMap["group_name"].(string); ok && v != "" { + apiObject.GroupName = aws.String(v) } - if err := d.Set("secondary_sources", flattenProjectSecondarySources(project.SecondarySources)); err != nil { - return sdkdiag.AppendErrorf(diags, "setting secondary_sources: %s", err) + if v, ok := tfMap["stream_name"].(string); ok && v != "" { + apiObject.StreamName = aws.String(v) } - if err := d.Set("secondary_source_version", flattenProjectSecondarySourceVersions(project.SecondarySourceVersions)); err != nil { - return sdkdiag.AppendErrorf(diags, "setting secondary_source_version: %s", err) - } + return apiObject +} - if err := d.Set("source", flattenProjectSource(project.Source)); err != nil { - return sdkdiag.AppendErrorf(diags, "setting source: %s", err) +func expandS3LogsConfig(tfMap map[string]interface{}) *types.S3LogsConfig { + if tfMap == nil { + return nil } - if err := d.Set("vpc_config", flattenVPCConfig(project.VpcConfig)); err != nil { - return sdkdiag.AppendErrorf(diags, "setting vpc_config: %s", err) + apiObject := &types.S3LogsConfig{ + EncryptionDisabled: aws.Bool(tfMap["encryption_disabled"].(bool)), + Status: types.LogsConfigStatusType(tfMap["status"].(string)), } - if err := d.Set("build_batch_config", flattenBuildBatchConfig(project.BuildBatchConfig)); err != nil { - return sdkdiag.AppendErrorf(diags, "setting build_batch_config: %s", err) + if v, ok := tfMap["bucket_owner_access"].(string); ok && v != "" { + apiObject.BucketOwnerAccess = types.BucketOwnerAccess(v) } - d.Set("arn", project.Arn) - d.Set("concurrent_build_limit", project.ConcurrentBuildLimit) - d.Set("description", project.Description) - d.Set("encryption_key", project.EncryptionKey) - d.Set("name", project.Name) - d.Set("service_role", project.ServiceRole) - d.Set("source_version", project.SourceVersion) - d.Set("build_timeout", project.TimeoutInMinutes) - d.Set("project_visibility", project.ProjectVisibility) - d.Set("public_project_alias", project.PublicProjectAlias) - d.Set("resource_access_role", project.ResourceAccessRole) - d.Set("queued_timeout", project.QueuedTimeoutInMinutes) - if project.Badge != nil { - d.Set("badge_enabled", project.Badge.BadgeEnabled) - d.Set("badge_url", project.Badge.BadgeRequestUrl) - } else { - d.Set("badge_enabled", false) - d.Set("badge_url", "") + if v, ok := tfMap["location"].(string); ok && v != "" { + apiObject.Location = aws.String(v) } - setTagsOut(ctx, project.Tags) - - return diags + return apiObject } -func resourceProjectUpdate(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - var diags diag.Diagnostics - conn := meta.(*conns.AWSClient).CodeBuildConn(ctx) - - if d.HasChanges("project_visibility", "resource_access_role") { - visInput := &codebuild.UpdateProjectVisibilityInput{ - ProjectArn: aws.String(d.Id()), - ProjectVisibility: aws.String(d.Get("project_visibility").(string)), - } - - if v, ok := d.GetOk("resource_access_role"); ok { - visInput.ResourceAccessRole = aws.String(v.(string)) - } +func expandProjectBuildBatchConfig(tfMap map[string]interface{}) *types.ProjectBuildBatchConfig { + if tfMap == nil { + return nil + } - _, err := 
conn.UpdateProjectVisibilityWithContext(ctx, visInput) - if err != nil { - return sdkdiag.AppendErrorf(diags, "updating CodeBuild project (%s) visibility: %s", d.Id(), err) - } + apiObject := &types.ProjectBuildBatchConfig{ + ServiceRole: aws.String(tfMap["service_role"].(string)), } - if d.HasChangesExcept("project_visibility", "resource_access_role") { - input := &codebuild.UpdateProjectInput{ - Name: aws.String(d.Get("name").(string)), - } + if v, ok := tfMap["combine_artifacts"].(bool); ok { + apiObject.CombineArtifacts = aws.Bool(v) + } - if d.HasChange("environment") { - projectEnv := expandProjectEnvironment(d) - input.Environment = projectEnv - } + if v, ok := tfMap["restrictions"].([]interface{}); ok && len(v) > 0 && v[0] != nil { + apiObject.Restrictions = expandBatchRestrictions(v[0].(map[string]interface{})) + } - if d.HasChange("file_system_locations") { - projectFileSystemLocations := expandProjectFileSystemLocations(d) - input.FileSystemLocations = projectFileSystemLocations - } + if v, ok := tfMap["timeout_in_mins"].(int); ok && v != 0 { + apiObject.TimeoutInMins = aws.Int32(int32(v)) + } - if d.HasChange("source") { - projectSource := expandProjectSource(d) - input.Source = &projectSource - } + return apiObject +} - if d.HasChange("artifacts") { - projectArtifacts := expandProjectArtifacts(d) - input.Artifacts = &projectArtifacts - } +func expandBatchRestrictions(tfMap map[string]interface{}) *types.BatchRestrictions { + if tfMap == nil { + return nil + } - if d.HasChange("secondary_sources") { - _, n := d.GetChange("secondary_sources") + apiObject := &types.BatchRestrictions{} - if n.(*schema.Set).Len() > 0 { - projectSecondarySources := expandProjectSecondarySources(d) - input.SecondarySources = projectSecondarySources - } else { - input.SecondarySources = []*codebuild.ProjectSource{} - } - } + if v, ok := tfMap["compute_types_allowed"].([]interface{}); ok && len(v) > 0 { + apiObject.ComputeTypesAllowed = flex.ExpandStringValueList(v) + } - if d.HasChange("secondary_source_version") { - _, n := d.GetChange("secondary_source_version") + if v, ok := tfMap["maximum_builds_allowed"].(int); ok && v != 0 { + apiObject.MaximumBuildsAllowed = aws.Int32(int32(v)) + } - psv := d.Get("secondary_source_version").(*schema.Set) + return apiObject +} - if n.(*schema.Set).Len() > 0 { - input.SecondarySourceVersions = expandProjectSecondarySourceVersions(psv) - } else { - input.SecondarySourceVersions = []*codebuild.ProjectSourceVersion{} - } - } +func expandVPCConfig(tfMap map[string]interface{}) *types.VpcConfig { + if tfMap == nil { + return nil + } - if d.HasChange("secondary_artifacts") { - _, n := d.GetChange("secondary_artifacts") + apiObject := &types.VpcConfig{ + SecurityGroupIds: flex.ExpandStringValueSet(tfMap["security_group_ids"].(*schema.Set)), + Subnets: flex.ExpandStringValueSet(tfMap["subnets"].(*schema.Set)), + VpcId: aws.String(tfMap["vpc_id"].(string)), + } - if n.(*schema.Set).Len() > 0 { - projectSecondaryArtifacts := expandProjectSecondaryArtifacts(d) - input.SecondaryArtifacts = projectSecondaryArtifacts - } else { - input.SecondaryArtifacts = []*codebuild.ProjectArtifacts{} - } - } + return apiObject +} - if d.HasChange("vpc_config") { - input.VpcConfig = expandVPCConfig(d.Get("vpc_config").([]interface{})) - } +func expandProjectSecondarySources(tfList []interface{}) []types.ProjectSource { + if len(tfList) == 0 { + return nil + } - if d.HasChange("logs_config") { - logsConfig := expandProjectLogsConfig(d) - input.LogsConfig = logsConfig - } + apiObjects := 
make([]types.ProjectSource, 0) - if d.HasChange("build_batch_config") { - input.BuildBatchConfig = expandBuildBatchConfig(d) - // If BuildBatchConfig is nil we should remove it by passing an empty struct. - if input.BuildBatchConfig == nil { - input.BuildBatchConfig = &codebuild.ProjectBuildBatchConfig{} - } + for _, tfMapRaw := range tfList { + tfMap, ok := tfMapRaw.(map[string]interface{}) + if !ok { + continue } - if d.HasChange("cache") { - if v, ok := d.GetOk("cache"); ok { - input.Cache = expandProjectCache(v.([]interface{})) - } else { - input.Cache = &codebuild.ProjectCache{ - Type: aws.String("NO_CACHE"), - } - } - } + apiObject := expandProjectSource(tfMap) - if d.HasChange("concurrent_build_limit") { - input.ConcurrentBuildLimit = aws.Int64(int64(d.Get("concurrent_build_limit").(int))) + if apiObject == nil { + continue } - if d.HasChange("description") { - input.Description = aws.String(d.Get("description").(string)) - } + apiObjects = append(apiObjects, *apiObject) + } - if d.HasChange("encryption_key") { - input.EncryptionKey = aws.String(d.Get("encryption_key").(string)) - } + return apiObjects +} - if d.HasChange("service_role") { - input.ServiceRole = aws.String(d.Get("service_role").(string)) - } +func expandProjectSource(tfMap map[string]interface{}) *types.ProjectSource { + if tfMap == nil { + return nil + } - if d.HasChange("source_version") { - input.SourceVersion = aws.String(d.Get("source_version").(string)) - } + sourceType := types.SourceType(tfMap["type"].(string)) + apiObject := &types.ProjectSource{ + Buildspec: aws.String(tfMap["buildspec"].(string)), + GitCloneDepth: aws.Int32(int32(tfMap["git_clone_depth"].(int))), + InsecureSsl: aws.Bool(tfMap["insecure_ssl"].(bool)), + Type: sourceType, + } - if d.HasChange("build_timeout") { - input.TimeoutInMinutes = aws.Int64(int64(d.Get("build_timeout").(int))) - } + if v, ok := tfMap["location"].(string); ok && v != "" { + apiObject.Location = aws.String(v) + } - if d.HasChange("queued_timeout") { - input.QueuedTimeoutInMinutes = aws.Int64(int64(d.Get("queued_timeout").(int))) - } + if v, ok := tfMap["source_identifier"].(string); ok && v != "" { + apiObject.SourceIdentifier = aws.String(v) + } - if d.HasChange("badge_enabled") { - input.BadgeEnabled = aws.Bool(d.Get("badge_enabled").(bool)) - } + // Only valid for BITBUCKET, GITHUB, and GITHUB_ENTERPRISE source types, e.g. + // InvalidInputException: Source type NO_SOURCE does not support ReportBuildStatus + if sourceType == types.SourceTypeBitbucket || sourceType == types.SourceTypeGithub || sourceType == types.SourceTypeGithubEnterprise { + apiObject.ReportBuildStatus = aws.Bool(tfMap["report_build_status"].(bool)) + } - // The documentation clearly says "The replacement set of tags for this build project." - // But its a slice of pointers so if not set for every update, they get removed. - input.Tags = getTagsIn(ctx) + // Only valid for CODECOMMIT, GITHUB, GITHUB_ENTERPRISE, BITBUCKET source types. 
+ if sourceType == types.SourceTypeCodecommit || sourceType == types.SourceTypeGithub || sourceType == types.SourceTypeGithubEnterprise || sourceType == types.SourceTypeBitbucket { + if v, ok := tfMap["git_submodules_config"].([]interface{}); ok && len(v) > 0 && v[0] != nil { + tfMap := v[0].(map[string]interface{}) - // Handle IAM eventual consistency - err := retry.RetryContext(ctx, propagationTimeout, func() *retry.RetryError { - _, err := conn.UpdateProjectWithContext(ctx, input) - if err != nil { - // InvalidInputException: CodeBuild is not authorized to perform - // InvalidInputException: Not authorized to perform DescribeSecurityGroups - if tfawserr.ErrMessageContains(err, codebuild.ErrCodeInvalidInputException, "ot authorized to perform") { - return retry.RetryableError(err) - } + gitSubmodulesConfig := &types.GitSubmodulesConfig{} - return retry.NonRetryableError(err) + if v, ok := tfMap["fetch_submodules"].(bool); ok { + gitSubmodulesConfig.FetchSubmodules = aws.Bool(v) } - return nil - }) - - if tfresource.TimedOut(err) { - _, err = conn.UpdateProjectWithContext(ctx, input) - } - if err != nil { - return sdkdiag.AppendErrorf(diags, "updating CodeBuild project (%s): %s", d.Id(), err) + apiObject.GitSubmodulesConfig = gitSubmodulesConfig } } - return append(diags, resourceProjectRead(ctx, d, meta)...) -} + // Only valid for BITBUCKET, GITHUB, GITHUB_ENTERPRISE source types. + if sourceType == types.SourceTypeBitbucket || sourceType == types.SourceTypeGithub || sourceType == types.SourceTypeGithubEnterprise { + if v, ok := tfMap["build_status_config"].([]interface{}); ok && len(v) > 0 && v[0] != nil { + tfMap := v[0].(map[string]interface{}) -func resourceProjectDelete(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - var diags diag.Diagnostics - conn := meta.(*conns.AWSClient).CodeBuildConn(ctx) + buildStatusConfig := &types.BuildStatusConfig{} - _, err := conn.DeleteProjectWithContext(ctx, &codebuild.DeleteProjectInput{ - Name: aws.String(d.Id()), - }) - if err != nil { - return sdkdiag.AppendErrorf(diags, "deleting CodeBuild project (%s): %s", d.Id(), err) + if v, ok := tfMap["context"].(string); ok && v != "" { + buildStatusConfig.Context = aws.String(v) + } + if v, ok := tfMap["target_url"].(string); ok && v != "" { + buildStatusConfig.TargetUrl = aws.String(v) + } + + apiObject.BuildStatusConfig = buildStatusConfig + } } - return diags + + return apiObject } -func flattenProjectFileSystemLocations(apiObjects []*codebuild.ProjectFileSystemLocation) []interface{} { +func flattenProjectFileSystemLocations(apiObjects []types.ProjectFileSystemLocation) []interface{} { if len(apiObjects) == 0 { return nil } @@ -1580,392 +1615,317 @@ func flattenProjectFileSystemLocations(apiObjects []*codebuild.ProjectFileSystem var tfList []interface{} for _, apiObject := range apiObjects { - if apiObject == nil { - continue - } - tfList = append(tfList, flattenProjectFileSystemLocation(apiObject)) } return tfList } -func flattenProjectFileSystemLocation(apiObject *codebuild.ProjectFileSystemLocation) map[string]interface{} { - if apiObject == nil { - return nil +func flattenProjectFileSystemLocation(apiObject types.ProjectFileSystemLocation) map[string]interface{} { + tfMap := map[string]interface{}{ + "types": apiObject.Type, } - tfMap := map[string]interface{}{} - if v := apiObject.Identifier; v != nil { - tfMap["identifier"] = aws.StringValue(v) + tfMap["identifier"] = aws.ToString(v) } if v := apiObject.Location; v != nil { - tfMap["location"] = 
aws.StringValue(v) + tfMap["location"] = aws.ToString(v) } if v := apiObject.MountOptions; v != nil { - tfMap["mount_options"] = aws.StringValue(v) + tfMap["mount_options"] = aws.ToString(v) } if v := apiObject.MountPoint; v != nil { - tfMap["mount_point"] = aws.StringValue(v) - } - - if v := apiObject.Type; v != nil { - tfMap["type"] = aws.StringValue(v) + tfMap["mount_point"] = aws.ToString(v) } return tfMap } -func flattenLogsConfig(logsConfig *codebuild.LogsConfig) []interface{} { - if logsConfig == nil { +func flattenLogsConfig(apiObject *types.LogsConfig) []interface{} { + if apiObject == nil { return []interface{}{} } - values := map[string]interface{}{} + tfMap := map[string]interface{}{} - if v := logsConfig.CloudWatchLogs; v != nil { - values["cloudwatch_logs"] = flattenCloudWatchLogs(v) + if v := apiObject.CloudWatchLogs; v != nil { + tfMap["cloudwatch_logs"] = flattenCloudWatchLogs(v) } - if v := logsConfig.S3Logs; v != nil { - values["s3_logs"] = flattenS3Logs(v) + if v := apiObject.S3Logs; v != nil { + tfMap["s3_logs"] = flattenS3Logs(v) } - return []interface{}{values} + return []interface{}{tfMap} } -func flattenCloudWatchLogs(cloudWatchLogsConfig *codebuild.CloudWatchLogsConfig) []interface{} { - values := map[string]interface{}{} +func flattenCloudWatchLogs(apiObject *types.CloudWatchLogsConfig) []interface{} { + tfMap := map[string]interface{}{} - if cloudWatchLogsConfig == nil { - values["status"] = codebuild.LogsConfigStatusTypeDisabled + if apiObject == nil { + tfMap["status"] = types.LogsConfigStatusTypeDisabled } else { - values["status"] = aws.StringValue(cloudWatchLogsConfig.Status) - values["group_name"] = aws.StringValue(cloudWatchLogsConfig.GroupName) - values["stream_name"] = aws.StringValue(cloudWatchLogsConfig.StreamName) + tfMap["status"] = apiObject.Status + tfMap["group_name"] = aws.ToString(apiObject.GroupName) + tfMap["stream_name"] = aws.ToString(apiObject.StreamName) } - return []interface{}{values} + return []interface{}{tfMap} } -func flattenS3Logs(s3LogsConfig *codebuild.S3LogsConfig) []interface{} { - values := map[string]interface{}{} +func flattenS3Logs(apiObject *types.S3LogsConfig) []interface{} { + tfMap := map[string]interface{}{} - if s3LogsConfig == nil { - values["status"] = codebuild.LogsConfigStatusTypeDisabled + if apiObject == nil { + tfMap["status"] = types.LogsConfigStatusTypeDisabled } else { - values["status"] = aws.StringValue(s3LogsConfig.Status) - values["location"] = aws.StringValue(s3LogsConfig.Location) - values["encryption_disabled"] = aws.BoolValue(s3LogsConfig.EncryptionDisabled) - values["bucket_owner_access"] = aws.StringValue(s3LogsConfig.BucketOwnerAccess) + tfMap["status"] = apiObject.Status + tfMap["location"] = aws.ToString(apiObject.Location) + tfMap["encryption_disabled"] = aws.ToBool(apiObject.EncryptionDisabled) + tfMap["bucket_owner_access"] = apiObject.BucketOwnerAccess } - return []interface{}{values} + return []interface{}{tfMap} } -func flattenProjectSecondaryArtifacts(artifactsList []*codebuild.ProjectArtifacts) *schema.Set { - artifactSet := schema.Set{ - F: resourceProjectArtifactsHash, - } +func flattenProjectSecondaryArtifacts(apiObjects []types.ProjectArtifacts) []interface{} { + tfList := []interface{}{} - for _, artifacts := range artifactsList { - artifactSet.Add(flattenProjectArtifactsData(*artifacts)) + for _, apiObject := range apiObjects { + tfList = append(tfList, flattenProjectArtifacts(apiObject)) } - return &artifactSet -} - -func flattenProjectArtifacts(artifacts 
*codebuild.ProjectArtifacts) []interface{} { - return []interface{}{flattenProjectArtifactsData(*artifacts)} + return tfList } -func flattenProjectArtifactsData(artifacts codebuild.ProjectArtifacts) map[string]interface{} { - values := map[string]interface{}{} - - values["type"] = aws.StringValue(artifacts.Type) - - if artifacts.ArtifactIdentifier != nil { - values["artifact_identifier"] = aws.StringValue(artifacts.ArtifactIdentifier) - } - - if artifacts.EncryptionDisabled != nil { - values["encryption_disabled"] = aws.BoolValue(artifacts.EncryptionDisabled) +func flattenProjectArtifacts(apiObject types.ProjectArtifacts) map[string]interface{} { + tfMap := map[string]interface{}{ + "bucket_owner_access": apiObject.BucketOwnerAccess, + "namespace_type": apiObject.NamespaceType, + "packaging": apiObject.Packaging, + "type": apiObject.Type, } - if artifacts.OverrideArtifactName != nil { - values["override_artifact_name"] = aws.BoolValue(artifacts.OverrideArtifactName) + if apiObject.ArtifactIdentifier != nil { + tfMap["artifact_identifier"] = aws.ToString(apiObject.ArtifactIdentifier) } - if artifacts.Location != nil { - values["location"] = aws.StringValue(artifacts.Location) + if apiObject.EncryptionDisabled != nil { + tfMap["encryption_disabled"] = aws.ToBool(apiObject.EncryptionDisabled) } - if artifacts.Name != nil { - values["name"] = aws.StringValue(artifacts.Name) + if apiObject.Location != nil { + tfMap["location"] = aws.ToString(apiObject.Location) } - if artifacts.NamespaceType != nil { - values["namespace_type"] = aws.StringValue(artifacts.NamespaceType) + if apiObject.OverrideArtifactName != nil { + tfMap["override_artifact_name"] = aws.ToBool(apiObject.OverrideArtifactName) } - if artifacts.Packaging != nil { - values["packaging"] = aws.StringValue(artifacts.Packaging) + if apiObject.Name != nil { + tfMap["name"] = aws.ToString(apiObject.Name) } - if artifacts.Path != nil { - values["path"] = aws.StringValue(artifacts.Path) + if apiObject.Path != nil { + tfMap["path"] = aws.ToString(apiObject.Path) } - if artifacts.BucketOwnerAccess != nil { - values["bucket_owner_access"] = aws.StringValue(artifacts.BucketOwnerAccess) - } - return values + return tfMap } -func flattenProjectCache(cache *codebuild.ProjectCache) []interface{} { - if cache == nil { +func flattenProjectCache(apiObject *types.ProjectCache) []interface{} { + if apiObject == nil { return []interface{}{} } - values := map[string]interface{}{ - "location": aws.StringValue(cache.Location), - "type": aws.StringValue(cache.Type), - "modes": aws.StringValueSlice(cache.Modes), + tfMap := map[string]interface{}{ + "location": aws.ToString(apiObject.Location), + "modes": apiObject.Modes, + "type": apiObject.Type, } - return []interface{}{values} + return []interface{}{tfMap} } -func flattenProjectEnvironment(environment *codebuild.ProjectEnvironment) []interface{} { - envConfig := map[string]interface{}{} - - envConfig["type"] = aws.StringValue(environment.Type) - envConfig["compute_type"] = aws.StringValue(environment.ComputeType) - envConfig["image"] = aws.StringValue(environment.Image) - envConfig["certificate"] = aws.StringValue(environment.Certificate) - envConfig["privileged_mode"] = aws.BoolValue(environment.PrivilegedMode) - envConfig["image_pull_credentials_type"] = aws.StringValue(environment.ImagePullCredentialsType) +func flattenProjectEnvironment(apiObject *types.ProjectEnvironment) []interface{} { + tfMap := map[string]interface{}{ + "compute_type": apiObject.ComputeType, + "image_pull_credentials_type": 
apiObject.ImagePullCredentialsType, + "type": apiObject.Type, + } - envConfig["registry_credential"] = flattenRegistryCredential(environment.RegistryCredential) + tfMap["image"] = aws.ToString(apiObject.Image) + tfMap["certificate"] = aws.ToString(apiObject.Certificate) + tfMap["privileged_mode"] = aws.ToBool(apiObject.PrivilegedMode) + tfMap["registry_credential"] = flattenRegistryCredential(apiObject.RegistryCredential) - if environment.EnvironmentVariables != nil { - envConfig["environment_variable"] = environmentVariablesToMap(environment.EnvironmentVariables) + if apiObject.EnvironmentVariables != nil { + tfMap["environment_variable"] = flattenEnvironmentVariables(apiObject.EnvironmentVariables) } - return []interface{}{envConfig} + return []interface{}{tfMap} } -func flattenRegistryCredential(registryCredential *codebuild.RegistryCredential) []interface{} { - if registryCredential == nil { +func flattenRegistryCredential(apiObject *types.RegistryCredential) []interface{} { + if apiObject == nil { return []interface{}{} } - values := map[string]interface{}{ - "credential": aws.StringValue(registryCredential.Credential), - "credential_provider": aws.StringValue(registryCredential.CredentialProvider), + tfMap := map[string]interface{}{ + "credential": aws.ToString(apiObject.Credential), + "credential_provider": apiObject.CredentialProvider, } - return []interface{}{values} + return []interface{}{tfMap} } -func flattenProjectSecondarySources(sourceList []*codebuild.ProjectSource) []interface{} { - l := make([]interface{}, 0) +func flattenProjectSecondarySources(apiObject []types.ProjectSource) []interface{} { + tfList := make([]interface{}, 0) - for _, source := range sourceList { - l = append(l, flattenProjectSourceData(source)) + for _, apiObject := range apiObject { + tfList = append(tfList, flattenProjectSource(apiObject)) } - return l -} - -func flattenProjectSource(source *codebuild.ProjectSource) []interface{} { - l := make([]interface{}, 1) - - l[0] = flattenProjectSourceData(source) - - return l + return tfList } -func flattenProjectSourceData(source *codebuild.ProjectSource) interface{} { - m := map[string]interface{}{ - "buildspec": aws.StringValue(source.Buildspec), - "location": aws.StringValue(source.Location), - "git_clone_depth": int(aws.Int64Value(source.GitCloneDepth)), - "insecure_ssl": aws.BoolValue(source.InsecureSsl), - "report_build_status": aws.BoolValue(source.ReportBuildStatus), - "type": aws.StringValue(source.Type), +func flattenProjectSource(apiObject types.ProjectSource) interface{} { + tfMap := map[string]interface{}{ + "buildspec": aws.ToString(apiObject.Buildspec), + "location": aws.ToString(apiObject.Location), + "git_clone_depth": aws.ToInt32(apiObject.GitCloneDepth), + "insecure_ssl": aws.ToBool(apiObject.InsecureSsl), + "report_build_status": aws.ToBool(apiObject.ReportBuildStatus), + "type": apiObject.Type, } - m["git_submodules_config"] = flattenProjectGitSubmodulesConfig(source.GitSubmodulesConfig) + tfMap["git_submodules_config"] = flattenProjectGitSubmodulesConfig(apiObject.GitSubmodulesConfig) - m["build_status_config"] = flattenProjectBuildStatusConfig(source.BuildStatusConfig) + tfMap["build_status_config"] = flattenProjectBuildStatusConfig(apiObject.BuildStatusConfig) - if source.SourceIdentifier != nil { - m["source_identifier"] = aws.StringValue(source.SourceIdentifier) + if apiObject.SourceIdentifier != nil { + tfMap["source_identifier"] = aws.ToString(apiObject.SourceIdentifier) } - return m + return tfMap } -func 
flattenProjectSecondarySourceVersions(sourceVersions []*codebuild.ProjectSourceVersion) []interface{} { - l := make([]interface{}, 0) +func flattenProjectSecondarySourceVersions(apiObjects []types.ProjectSourceVersion) []interface{} { + tfList := make([]interface{}, 0) - for _, sourceVersion := range sourceVersions { - l = append(l, flattenProjectSourceVersionsData(sourceVersion)) + for _, apiObject := range apiObjects { + tfList = append(tfList, flattenProjectSourceVersion(apiObject)) } - return l + return tfList } -func flattenProjectSourceVersionsData(sourceVersion *codebuild.ProjectSourceVersion) map[string]interface{} { - values := map[string]interface{}{} +func flattenProjectSourceVersion(apiObject types.ProjectSourceVersion) map[string]interface{} { + tfMap := map[string]interface{}{} - if sourceVersion.SourceIdentifier != nil { - values["source_identifier"] = aws.StringValue(sourceVersion.SourceIdentifier) + if apiObject.SourceIdentifier != nil { + tfMap["source_identifier"] = aws.ToString(apiObject.SourceIdentifier) } - if sourceVersion.SourceVersion != nil { - values["source_version"] = aws.StringValue(sourceVersion.SourceVersion) + if apiObject.SourceVersion != nil { + tfMap["source_version"] = aws.ToString(apiObject.SourceVersion) } - return values + return tfMap } -func flattenProjectGitSubmodulesConfig(config *codebuild.GitSubmodulesConfig) []interface{} { - if config == nil { +func flattenProjectGitSubmodulesConfig(apiObject *types.GitSubmodulesConfig) []interface{} { + if apiObject == nil { return []interface{}{} } - values := map[string]interface{}{ - "fetch_submodules": aws.BoolValue(config.FetchSubmodules), + tfMap := map[string]interface{}{ + "fetch_submodules": aws.ToBool(apiObject.FetchSubmodules), } - return []interface{}{values} + return []interface{}{tfMap} } -func flattenProjectBuildStatusConfig(config *codebuild.BuildStatusConfig) []interface{} { - if config == nil { +func flattenProjectBuildStatusConfig(apiObject *types.BuildStatusConfig) []interface{} { + if apiObject == nil { return []interface{}{} } - values := map[string]interface{}{ - "context": aws.StringValue(config.Context), - "target_url": aws.StringValue(config.TargetUrl), + tfMap := map[string]interface{}{ + "context": aws.ToString(apiObject.Context), + "target_url": aws.ToString(apiObject.TargetUrl), } - return []interface{}{values} -} - -func flattenVPCConfig(vpcConfig *codebuild.VpcConfig) []interface{} { - if vpcConfig != nil { - values := map[string]interface{}{} - - values["vpc_id"] = aws.StringValue(vpcConfig.VpcId) - values["subnets"] = flex.FlattenStringSet(vpcConfig.Subnets) - values["security_group_ids"] = flex.FlattenStringSet(vpcConfig.SecurityGroupIds) - - return []interface{}{values} - } - return nil + return []interface{}{tfMap} } -func flattenBuildBatchConfig(buildBatchConfig *codebuild.ProjectBuildBatchConfig) []interface{} { - if buildBatchConfig == nil { +func flattenVPCConfig(apiObject *types.VpcConfig) []interface{} { + if apiObject == nil { return nil } - values := map[string]interface{}{} - - values["service_role"] = aws.StringValue(buildBatchConfig.ServiceRole) - - if buildBatchConfig.CombineArtifacts != nil { - values["combine_artifacts"] = aws.BoolValue(buildBatchConfig.CombineArtifacts) - } + tfMap := map[string]interface{}{} - if buildBatchConfig.Restrictions != nil { - values["restrictions"] = flattenBuildBatchRestrictionsConfig(buildBatchConfig.Restrictions) - } + tfMap["vpc_id"] = aws.ToString(apiObject.VpcId) + tfMap["subnets"] = apiObject.Subnets + 
tfMap["security_group_ids"] = apiObject.SecurityGroupIds - if buildBatchConfig.TimeoutInMins != nil { - values["timeout_in_mins"] = aws.Int64Value(buildBatchConfig.TimeoutInMins) - } + return []interface{}{tfMap} - return []interface{}{values} } -func flattenBuildBatchRestrictionsConfig(restrictions *codebuild.BatchRestrictions) []interface{} { - if restrictions == nil { - return []interface{}{} - } - - values := map[string]interface{}{ - "compute_types_allowed": aws.StringValueSlice(restrictions.ComputeTypesAllowed), - "maximum_builds_allowed": aws.Int64Value(restrictions.MaximumBuildsAllowed), +func flattenBuildBatchConfig(apiObject *types.ProjectBuildBatchConfig) []interface{} { + if apiObject == nil { + return nil } - return []interface{}{values} -} - -func resourceProjectArtifactsHash(v interface{}) int { - var buf bytes.Buffer - m := v.(map[string]interface{}) + tfMap := map[string]interface{}{} - if v, ok := m["artifact_identifier"]; ok { - buf.WriteString(fmt.Sprintf("%s-", v.(string))) - } + tfMap["service_role"] = aws.ToString(apiObject.ServiceRole) - if v, ok := m["encryption_disabled"]; ok { - buf.WriteString(fmt.Sprintf("%t-", v.(bool))) + if apiObject.CombineArtifacts != nil { + tfMap["combine_artifacts"] = aws.ToBool(apiObject.CombineArtifacts) } - if v, ok := m["location"]; ok { - buf.WriteString(fmt.Sprintf("%s-", v.(string))) + if apiObject.Restrictions != nil { + tfMap["restrictions"] = flattenBuildBatchRestrictionsConfig(apiObject.Restrictions) } - if v, ok := m["namespace_type"]; ok { - buf.WriteString(fmt.Sprintf("%s-", v.(string))) + if apiObject.TimeoutInMins != nil { + tfMap["timeout_in_mins"] = aws.ToInt32(apiObject.TimeoutInMins) } - if v, ok := m["override_artifact_name"]; ok { - buf.WriteString(fmt.Sprintf("%t-", v.(bool))) - } + return []interface{}{tfMap} +} - if v, ok := m["packaging"]; ok { - buf.WriteString(fmt.Sprintf("%s-", v.(string))) +func flattenBuildBatchRestrictionsConfig(apiObject *types.BatchRestrictions) []interface{} { + if apiObject == nil { + return []interface{}{} } - if v, ok := m["path"]; ok { - buf.WriteString(fmt.Sprintf("%s-", v.(string))) + tfMap := map[string]interface{}{ + "compute_types_allowed": apiObject.ComputeTypesAllowed, + "maximum_builds_allowed": aws.ToInt32(apiObject.MaximumBuildsAllowed), } - if v, ok := m["type"]; ok { - buf.WriteString(fmt.Sprintf("%s-", v.(string))) - } + return []interface{}{tfMap} +} - if v, ok := m["bucket_owner_access"]; ok { - buf.WriteString(fmt.Sprintf("%s-", v.(string))) - } +func flattenEnvironmentVariables(apiObjects []types.EnvironmentVariable) []interface{} { + tfList := []interface{}{} - return create.StringHashcode(buf.String()) -} + for _, apiObject := range apiObjects { + tfMap := map[string]interface{}{} + tfMap["name"] = aws.ToString(apiObject.Name) + tfMap["value"] = aws.ToString(apiObject.Value) + tfMap["type"] = apiObject.Type -func environmentVariablesToMap(environmentVariables []*codebuild.EnvironmentVariable) []interface{} { - envVariables := []interface{}{} - if len(environmentVariables) > 0 { - for _, env := range environmentVariables { - item := map[string]interface{}{} - item["name"] = aws.StringValue(env.Name) - item["value"] = aws.StringValue(env.Value) - if env.Type != nil { - item["type"] = aws.StringValue(env.Type) - } - envVariables = append(envVariables, item) - } + tfList = append(tfList, tfMap) } - return envVariables + return tfList } func ValidProjectName(v interface{}, k string) (ws []string, errors []error) { diff --git 
a/internal/service/codebuild/service_package_gen.go b/internal/service/codebuild/service_package_gen.go index d7a5c5bddc7..e2bbba731f2 100644 --- a/internal/service/codebuild/service_package_gen.go +++ b/internal/service/codebuild/service_package_gen.go @@ -29,7 +29,7 @@ func (p *servicePackage) SDKDataSources(ctx context.Context) []*types.ServicePac func (p *servicePackage) SDKResources(ctx context.Context) []*types.ServicePackageSDKResource { return []*types.ServicePackageSDKResource{ { - Factory: ResourceProject, + Factory: resourceProject, TypeName: "aws_codebuild_project", Name: "Project", Tags: &types.ServicePackageResourceTags{}, diff --git a/internal/service/codebuild/webhook.go b/internal/service/codebuild/webhook.go index 8d7b96ddcdc..fc30667fcd7 100644 --- a/internal/service/codebuild/webhook.go +++ b/internal/service/codebuild/webhook.go @@ -16,7 +16,6 @@ import ( "github.com/hashicorp/terraform-provider-aws/internal/enum" "github.com/hashicorp/terraform-provider-aws/internal/errs" "github.com/hashicorp/terraform-provider-aws/internal/errs/sdkdiag" - tfslices "github.com/hashicorp/terraform-provider-aws/internal/slices" "github.com/hashicorp/terraform-provider-aws/internal/tfresource" ) @@ -209,7 +208,7 @@ func resourceWebhookDelete(ctx context.Context, d *schema.ResourceData, meta int } func findWebhookByProjectName(ctx context.Context, conn *codebuild.Client, name string) (*types.Webhook, error) { - output, err := findProjectByName(ctx, conn, name) + output, err := findProjectByNameOrARN(ctx, conn, name) if err != nil { return nil, err @@ -222,38 +221,6 @@ func findWebhookByProjectName(ctx context.Context, conn *codebuild.Client, name return output.Webhook, nil } -func findProjectByName(ctx context.Context, conn *codebuild.Client, name string) (*types.Project, error) { - input := &codebuild.BatchGetProjectsInput{ - Names: tfslices.Of(name), - } - - return findProject(ctx, conn, input) -} - -func findProject(ctx context.Context, conn *codebuild.Client, input *codebuild.BatchGetProjectsInput) (*types.Project, error) { - output, err := findProjects(ctx, conn, input) - - if err != nil { - return nil, err - } - - return tfresource.AssertSingleValueResult(output) -} - -func findProjects(ctx context.Context, conn *codebuild.Client, input *codebuild.BatchGetProjectsInput) ([]types.Project, error) { - output, err := conn.BatchGetProjects(ctx, input) - - if err != nil { - return nil, err - } - - if output == nil { - return nil, tfresource.NewEmptyResultError(input) - } - - return output.Projects, nil -} - func expandWebhookFilterGroups(tfList []interface{}) [][]types.WebhookFilter { if len(tfList) == 0 { return nil From cbc248e044b618469b110fa647d22492a6c20192 Mon Sep 17 00:00:00 2001 From: Kit Ewbank Date: Mon, 22 Jan 2024 17:03:46 -0500 Subject: [PATCH 11/30] codebuild: Migrate sweepers to AWS SDK for Go v2. 
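
This replaces the SDK v1 ListReportGroupsPagesWithContext /
ListProjectsPagesWithContext callback style with the SDK v2 paginators and
swaps awsv1.SkipSweepError for awsv2.SkipSweepError. A minimal sketch of the
paginator idiom the sweepers now follow (illustrative helper, not part of this
change; assumes "context" and
"github.com/aws/aws-sdk-go-v2/service/codebuild" are imported):

    // listProjectNames walks every page of ListProjects and collects the
    // project names. ListProjectsOutput.Projects is []string in SDK v2
    // (it was []*string in v1), so no aws.StringValue dereference is needed.
    func listProjectNames(ctx context.Context, conn *codebuild.Client) ([]string, error) {
        var names []string
        pages := codebuild.NewListProjectsPaginator(conn, &codebuild.ListProjectsInput{})
        for pages.HasMorePages() {
            page, err := pages.NextPage(ctx)
            if err != nil {
                return nil, err
            }
            names = append(names, page.Projects...)
        }
        return names, nil
    }

The same shape is applied to the report group sweeper; source credentials are
not paginated, so that sweeper stays a single ListSourceCredentials call.
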
--- internal/service/codebuild/sweep.go | 125 +++++++++++++--------------- 1 file changed, 60 insertions(+), 65 deletions(-) diff --git a/internal/service/codebuild/sweep.go b/internal/service/codebuild/sweep.go index c64078194a6..8a2069bb8b5 100644 --- a/internal/service/codebuild/sweep.go +++ b/internal/service/codebuild/sweep.go @@ -7,12 +7,11 @@ import ( "fmt" "log" - "github.com/aws/aws-sdk-go/aws" - "github.com/aws/aws-sdk-go/service/codebuild" - "github.com/hashicorp/go-multierror" + "github.com/aws/aws-sdk-go-v2/aws" + "github.com/aws/aws-sdk-go-v2/service/codebuild" "github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/hashicorp/terraform-provider-aws/internal/sweep" - "github.com/hashicorp/terraform-provider-aws/internal/sweep/awsv1" + "github.com/hashicorp/terraform-provider-aws/internal/sweep/awsv2" ) func RegisterSweepers() { @@ -38,40 +37,37 @@ func sweepReportGroups(region string) error { if err != nil { return fmt.Errorf("error getting client: %w", err) } - - conn := client.CodeBuildConn(ctx) + conn := client.CodeBuildClient(ctx) + input := &codebuild.ListReportGroupsInput{} sweepResources := make([]sweep.Sweepable, 0) - input := &codebuild.ListReportGroupsInput{} - err = conn.ListReportGroupsPagesWithContext(ctx, input, func(page *codebuild.ListReportGroupsOutput, lastPage bool) bool { - if page == nil { - return !lastPage + pages := codebuild.NewListReportGroupsPaginator(conn, input) + for pages.HasMorePages() { + page, err := pages.NextPage(ctx) + + if awsv2.SkipSweepError(err) { + log.Printf("[WARN] Skipping CodeBuild Report Group sweep for %s: %s", region, err) + return nil + } + + if err != nil { + return fmt.Errorf("error listing CodeBuild ReportGroups (%s): %w", region, err) } - for _, arn := range page.ReportGroups { - id := aws.StringValue(arn) - r := ResourceReportGroup() + for _, v := range page.ReportGroups { + r := resourceReportGroup() d := r.Data(nil) - d.SetId(id) + d.SetId(v) d.Set("delete_reports", true) sweepResources = append(sweepResources, sweep.NewSweepResource(r, d, client)) } - - return !lastPage - }) - - if awsv1.SkipSweepError(err) { - log.Printf("[WARN] Skipping CodeBuild Report Group sweep for %s: %s", region, err) - return nil } - if err != nil { - return fmt.Errorf("error retrieving CodeBuild ReportGroups: %w", err) - } + err = sweep.SweepOrchestrator(ctx, sweepResources) - if err := sweep.SweepOrchestrator(ctx, sweepResources); err != nil { - return fmt.Errorf("error sweeping CodeBuild ReportGroups: %w", err) + if err != nil { + return fmt.Errorf("error sweeping CodeBuild ReportGroups (%s): %w", region, err) } return nil @@ -83,38 +79,36 @@ func sweepProjects(region string) error { if err != nil { return fmt.Errorf("error getting client: %w", err) } - - conn := client.CodeBuildConn(ctx) + conn := client.CodeBuildClient(ctx) + input := &codebuild.ListProjectsInput{} sweepResources := make([]sweep.Sweepable, 0) - input := &codebuild.ListProjectsInput{} - err = conn.ListProjectsPagesWithContext(ctx, input, func(page *codebuild.ListProjectsOutput, lastPage bool) bool { - if page == nil { - return !lastPage + pages := codebuild.NewListProjectsPaginator(conn, input) + for pages.HasMorePages() { + page, err := pages.NextPage(ctx) + + if awsv2.SkipSweepError(err) { + log.Printf("[WARN] Skipping CodeBuild Project sweep for %s: %s", region, err) + return nil + } + + if err != nil { + return fmt.Errorf("error listing CodeBuild Projects (%s): %w", region, err) } - for _, arn := range page.Projects { - id 
:= aws.StringValue(arn) - r := ResourceProject() + for _, v := range page.Projects { + r := resourceProject() d := r.Data(nil) - d.SetId(id) + d.SetId(v) sweepResources = append(sweepResources, sweep.NewSweepResource(r, d, client)) } + } - return !lastPage - }) + err = sweep.SweepOrchestrator(ctx, sweepResources) - if awsv1.SkipSweepError(err) { - log.Printf("[WARN] Skipping CodeBuild Project sweep for %s: %s", region, err) - return nil - } if err != nil { - return fmt.Errorf("error retrieving CodeBuild Projects: %w", err) - } - - if err := sweep.SweepOrchestrator(ctx, sweepResources); err != nil { - return fmt.Errorf("error sweeping CodeBuild Projects: %w", err) + return fmt.Errorf("error sweeping CodeBuild Projects (%s): %w", region, err) } return nil @@ -126,34 +120,35 @@ func sweepSourceCredentials(region string) error { if err != nil { return fmt.Errorf("error getting client: %w", err) } - - conn := client.CodeBuildConn(ctx) - var sweeperErrs *multierror.Error + conn := client.CodeBuildClient(ctx) + input := &codebuild.ListSourceCredentialsInput{} sweepResources := make([]sweep.Sweepable, 0) - input := &codebuild.ListSourceCredentialsInput{} - creds, err := conn.ListSourceCredentialsWithContext(ctx, input) + output, err := conn.ListSourceCredentials(ctx, input) + + if awsv2.SkipSweepError(err) { + log.Printf("[WARN] Skipping CodeBuild Source Credential sweep for %s: %s", region, err) + return nil + } - for _, cred := range creds.SourceCredentialsInfos { - id := aws.StringValue(cred.Arn) - r := ResourceSourceCredential() + if err != nil { + return fmt.Errorf("error listing CodeBuild Source Credentials (%s): %w", region, err) + } + + for _, v := range output.SourceCredentialsInfos { + id := aws.ToString(v.Arn) + r := resourceSourceCredential() d := r.Data(nil) d.SetId(id) sweepResources = append(sweepResources, sweep.NewSweepResource(r, d, client)) } - if awsv1.SkipSweepError(err) { - log.Printf("[WARN] Skipping CodeBuild Source Credential sweep for %s: %s", region, err) - return sweeperErrs.ErrorOrNil() - } - if err != nil { - sweeperErrs = multierror.Append(sweeperErrs, fmt.Errorf("error retrieving CodeBuild Source Credentials: %w", err)) - } + err = sweep.SweepOrchestrator(ctx, sweepResources) - if err := sweep.SweepOrchestrator(ctx, sweepResources); err != nil { - sweeperErrs = multierror.Append(sweeperErrs, fmt.Errorf("error sweeping CodeBuild Source Credentials: %w", err)) + if err != nil { + return fmt.Errorf("error sweeping CodeBuild Source Credentials (%s): %w", region, err) } - return sweeperErrs.ErrorOrNil() + return nil } From a0d32d1fdcea93747b2bde0a1feac896d0b763c1 Mon Sep 17 00:00:00 2001 From: Kit Ewbank Date: Mon, 22 Jan 2024 17:18:19 -0500 Subject: [PATCH 12/30] Add 'names.CodeBuildEndpointID'. --- names/names.go | 1 + 1 file changed, 1 insertion(+) diff --git a/names/names.go b/names/names.go index 2a24dbfcd02..65ccaca70f8 100644 --- a/names/names.go +++ b/names/names.go @@ -39,6 +39,7 @@ const ( ChimeSDKMediaPipelinesEndpointID = "media-pipelines-chime" CleanRoomsEndpointID = "cleanrooms" CloudWatchLogsEndpointID = "logs" + CodeBuildEndpointID = "codebuild" CodeCommitEndpointID = "codecommit" CodeDeployEndpointID = "codedeploy" CodeGuruProfilerEndpointID = "codeguru-profiler" From e20bd93d5f0fb9ea113aa6a2fc95fcac0850f7dc Mon Sep 17 00:00:00 2001 From: Kit Ewbank Date: Mon, 22 Jan 2024 17:38:09 -0500 Subject: [PATCH 13/30] codebuild: Migrate acceptance tests to AWS SDK for Go v2. 
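
The acceptance tests below switch from the v1 client types to the v2 "types"
package and take the error-check service identifier from
names.CodeBuildEndpointID. A minimal, self-contained sketch of the v2 idioms
the migrated tests rely on follows (illustrative only, not part of this diff;
the main wrapper and printed values are assumptions for demonstration):

    package main

    import (
        "fmt"

        "github.com/aws/aws-sdk-go-v2/aws"
        "github.com/aws/aws-sdk-go-v2/service/codebuild/types"
    )

    func main() {
        // Service enums are typed strings in SDK v2, so test expectations
        // convert them explicitly, e.g. string(types.CacheTypeNoCache).
        fmt.Println(string(types.CacheTypeNoCache))
        fmt.Println(string(types.EnvironmentTypeLinuxContainer))

        // API shapes such as Project now live in the types package, and the
        // pointer helpers moved to the v2 aws package (aws.String/aws.ToString).
        var project types.Project
        fmt.Printf("%T\n", project)
        fmt.Println(aws.ToString(aws.String("example")))
    }
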
--- internal/service/codebuild/project_test.go | 427 +++++++++--------- .../service/codebuild/report_group_test.go | 60 +-- .../service/codebuild/resource_policy_test.go | 40 +- .../codebuild/source_credential_test.go | 38 +- internal/service/codebuild/webhook_test.go | 137 +++--- 5 files changed, 363 insertions(+), 339 deletions(-) diff --git a/internal/service/codebuild/project_test.go b/internal/service/codebuild/project_test.go index 2f75cd73d45..16303db6263 100644 --- a/internal/service/codebuild/project_test.go +++ b/internal/service/codebuild/project_test.go @@ -10,8 +10,8 @@ import ( "testing" "github.com/YakDriver/regexache" - "github.com/aws/aws-sdk-go/aws" - "github.com/aws/aws-sdk-go/service/codebuild" + "github.com/aws/aws-sdk-go-v2/aws" + "github.com/aws/aws-sdk-go-v2/service/codebuild/types" sdkacctest "github.com/hashicorp/terraform-plugin-testing/helper/acctest" "github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/hashicorp/terraform-plugin-testing/terraform" @@ -19,10 +19,11 @@ import ( "github.com/hashicorp/terraform-provider-aws/internal/conns" tfcodebuild "github.com/hashicorp/terraform-provider-aws/internal/service/codebuild" "github.com/hashicorp/terraform-provider-aws/internal/tfresource" + "github.com/hashicorp/terraform-provider-aws/names" ) func init() { - acctest.RegisterServiceErrorCheckFunc(codebuild.EndpointsID, testAccErrorCheckSkip) + acctest.RegisterServiceErrorCheckFunc(names.CodeBuildEndpointID, testAccErrorCheckSkip) } func testAccErrorCheckSkip(t *testing.T) resource.ErrorCheckFunc { @@ -57,7 +58,7 @@ func testAccGitHubSourceLocationFromEnv() string { func TestAccCodeBuildProject_basic(t *testing.T) { ctx := acctest.Context(t) - var project codebuild.Project + var project types.Project rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resourceName := "aws_codebuild_project.test" @@ -65,7 +66,7 @@ func TestAccCodeBuildProject_basic(t *testing.T) { resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, codebuild.EndpointsID), + ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), Steps: []resource.TestStep{ @@ -79,18 +80,18 @@ func TestAccCodeBuildProject_basic(t *testing.T) { resource.TestCheckResourceAttr(resourceName, "build_timeout", "60"), resource.TestCheckResourceAttr(resourceName, "queued_timeout", "480"), resource.TestCheckResourceAttr(resourceName, "cache.#", "1"), - resource.TestCheckResourceAttr(resourceName, "cache.0.type", codebuild.CacheTypeNoCache), + resource.TestCheckResourceAttr(resourceName, "cache.0.type", string(types.CacheTypeNoCache)), resource.TestCheckResourceAttr(resourceName, "description", ""), acctest.CheckResourceAttrRegionalARN(resourceName, "encryption_key", "kms", "alias/aws/s3"), resource.TestCheckResourceAttr(resourceName, "environment.#", "1"), - resource.TestCheckResourceAttr(resourceName, "environment.0.compute_type", codebuild.ComputeTypeBuildGeneral1Small), + resource.TestCheckResourceAttr(resourceName, "environment.0.compute_type", string(types.ComputeTypeBuildGeneral1Small)), resource.TestCheckResourceAttr(resourceName, "environment.0.environment_variable.#", "0"), resource.TestCheckResourceAttr(resourceName, "environment.0.image", "2"), resource.TestCheckResourceAttr(resourceName, "environment.0.privileged_mode", "false"), - 
resource.TestCheckResourceAttr(resourceName, "environment.0.type", codebuild.EnvironmentTypeLinuxContainer), - resource.TestCheckResourceAttr(resourceName, "environment.0.image_pull_credentials_type", codebuild.ImagePullCredentialsTypeCodebuild), - resource.TestCheckResourceAttr(resourceName, "logs_config.0.cloudwatch_logs.0.status", codebuild.LogsConfigStatusTypeEnabled), - resource.TestCheckResourceAttr(resourceName, "logs_config.0.s3_logs.0.status", codebuild.LogsConfigStatusTypeDisabled), + resource.TestCheckResourceAttr(resourceName, "environment.0.type", string(types.EnvironmentTypeLinuxContainer)), + resource.TestCheckResourceAttr(resourceName, "environment.0.image_pull_credentials_type", string(types.ImagePullCredentialsTypeCodebuild)), + resource.TestCheckResourceAttr(resourceName, "logs_config.0.cloudwatch_logs.0.status", string(types.LogsConfigStatusTypeEnabled)), + resource.TestCheckResourceAttr(resourceName, "logs_config.0.s3_logs.0.status", string(types.LogsConfigStatusTypeDisabled)), resource.TestCheckResourceAttr(resourceName, "project_visibility", "PRIVATE"), resource.TestCheckResourceAttrPair(resourceName, "service_role", roleResourceName, "arn"), resource.TestCheckResourceAttr(resourceName, "source.#", "1"), @@ -118,7 +119,7 @@ func TestAccCodeBuildProject_basic(t *testing.T) { func TestAccCodeBuildProject_publicVisibility(t *testing.T) { ctx := acctest.Context(t) - var project codebuild.Project + var project types.Project rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resourceName := "aws_codebuild_project.test" @@ -126,7 +127,7 @@ func TestAccCodeBuildProject_publicVisibility(t *testing.T) { resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, codebuild.EndpointsID), + ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), Steps: []resource.TestStep{ @@ -164,13 +165,13 @@ func TestAccCodeBuildProject_publicVisibility(t *testing.T) { func TestAccCodeBuildProject_badgeEnabled(t *testing.T) { ctx := acctest.Context(t) - var project codebuild.Project + var project types.Project rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resourceName := "aws_codebuild_project.test" resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, codebuild.EndpointsID), + ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), Steps: []resource.TestStep{ @@ -193,13 +194,13 @@ func TestAccCodeBuildProject_badgeEnabled(t *testing.T) { func TestAccCodeBuildProject_buildTimeout(t *testing.T) { ctx := acctest.Context(t) - var project codebuild.Project + var project types.Project rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resourceName := "aws_codebuild_project.test" resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, codebuild.EndpointsID), + ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), Steps: []resource.TestStep{ @@ -228,13 +229,13 @@ func TestAccCodeBuildProject_buildTimeout(t *testing.T) { func 
TestAccCodeBuildProject_queuedTimeout(t *testing.T) { ctx := acctest.Context(t) - var project codebuild.Project + var project types.Project rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resourceName := "aws_codebuild_project.test" resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, codebuild.EndpointsID), + ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), Steps: []resource.TestStep{ @@ -263,7 +264,7 @@ func TestAccCodeBuildProject_queuedTimeout(t *testing.T) { func TestAccCodeBuildProject_cache(t *testing.T) { ctx := acctest.Context(t) - var project codebuild.Project + var project types.Project rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resourceName := "aws_codebuild_project.test" s3Location1 := rName + "-1" @@ -271,7 +272,7 @@ func TestAccCodeBuildProject_cache(t *testing.T) { resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, codebuild.EndpointsID), + ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), Steps: []resource.TestStep{ @@ -280,11 +281,11 @@ func TestAccCodeBuildProject_cache(t *testing.T) { ExpectError: regexache.MustCompile(`cache location is required when cache type is "S3"`), }, { - Config: testAccProjectConfig_cache(rName, "", codebuild.CacheTypeNoCache), + Config: testAccProjectConfig_cache(rName, "", string(types.CacheTypeNoCache)), Check: resource.ComposeTestCheckFunc( testAccCheckProjectExists(ctx, resourceName, &project), resource.TestCheckResourceAttr(resourceName, "cache.#", "1"), - resource.TestCheckResourceAttr(resourceName, "cache.0.type", codebuild.CacheTypeNoCache), + resource.TestCheckResourceAttr(resourceName, "cache.0.type", string(types.CacheTypeNoCache)), ), }, { @@ -297,7 +298,7 @@ func TestAccCodeBuildProject_cache(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckProjectExists(ctx, resourceName, &project), resource.TestCheckResourceAttr(resourceName, "cache.#", "1"), - resource.TestCheckResourceAttr(resourceName, "cache.0.type", codebuild.CacheTypeNoCache), + resource.TestCheckResourceAttr(resourceName, "cache.0.type", string(types.CacheTypeNoCache)), ), }, { @@ -323,7 +324,7 @@ func TestAccCodeBuildProject_cache(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckProjectExists(ctx, resourceName, &project), resource.TestCheckResourceAttr(resourceName, "cache.#", "1"), - resource.TestCheckResourceAttr(resourceName, "cache.0.type", codebuild.CacheTypeNoCache), + resource.TestCheckResourceAttr(resourceName, "cache.0.type", string(types.CacheTypeNoCache)), ), }, { @@ -340,7 +341,7 @@ func TestAccCodeBuildProject_cache(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckProjectExists(ctx, resourceName, &project), resource.TestCheckResourceAttr(resourceName, "cache.#", "1"), - resource.TestCheckResourceAttr(resourceName, "cache.0.type", codebuild.CacheTypeS3), + resource.TestCheckResourceAttr(resourceName, "cache.0.type", string(types.CacheTypeS3)), ), }, }, @@ -349,13 +350,13 @@ func TestAccCodeBuildProject_cache(t *testing.T) { func TestAccCodeBuildProject_description(t *testing.T) { ctx := acctest.Context(t) - var project codebuild.Project + var 
project types.Project rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resourceName := "aws_codebuild_project.test" resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, codebuild.EndpointsID), + ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), Steps: []resource.TestStep{ @@ -384,13 +385,13 @@ func TestAccCodeBuildProject_description(t *testing.T) { func TestAccCodeBuildProject_fileSystemLocations(t *testing.T) { ctx := acctest.Context(t) - var project codebuild.Project + var project types.Project rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resourceName := "aws_codebuild_project.test" resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, codebuild.EndpointsID, "efs"), //using efs.EndpointsID will import efs and make linters sad + ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID, "efs"), //using efs.EndpointsID will import efs and make linters sad ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), Steps: []resource.TestStep{ @@ -399,17 +400,17 @@ func TestAccCodeBuildProject_fileSystemLocations(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckProjectExists(ctx, resourceName, &project), resource.TestCheckResourceAttr(resourceName, "environment.#", "1"), - resource.TestCheckResourceAttr(resourceName, "environment.0.compute_type", codebuild.ComputeTypeBuildGeneral1Small), + resource.TestCheckResourceAttr(resourceName, "environment.0.compute_type", string(types.ComputeTypeBuildGeneral1Small)), resource.TestCheckResourceAttr(resourceName, "environment.0.environment_variable.#", "0"), resource.TestCheckResourceAttr(resourceName, "environment.0.image", "2"), resource.TestCheckResourceAttr(resourceName, "environment.0.privileged_mode", "true"), - resource.TestCheckResourceAttr(resourceName, "environment.0.type", codebuild.EnvironmentTypeLinuxContainer), + resource.TestCheckResourceAttr(resourceName, "environment.0.type", string(types.EnvironmentTypeLinuxContainer)), resource.TestCheckResourceAttr(resourceName, "file_system_locations.#", "1"), resource.TestCheckResourceAttr(resourceName, "file_system_locations.0.identifier", "test"), resource.TestMatchResourceAttr(resourceName, "file_system_locations.0.location", regexache.MustCompile(`/directory-path$`)), resource.TestCheckResourceAttr(resourceName, "file_system_locations.0.mount_options", "nfsvers=4.1,rsize=1048576,wsize=1048576,hard,timeo=450,retrans=3"), resource.TestCheckResourceAttr(resourceName, "file_system_locations.0.mount_point", "/mount1"), - resource.TestCheckResourceAttr(resourceName, "file_system_locations.0.type", codebuild.FileSystemTypeEfs), + resource.TestCheckResourceAttr(resourceName, "file_system_locations.0.type", string(types.FileSystemTypeEfs)), ), }, { @@ -426,7 +427,7 @@ func TestAccCodeBuildProject_fileSystemLocations(t *testing.T) { resource.TestMatchResourceAttr(resourceName, "file_system_locations.0.location", regexache.MustCompile(`/directory-path$`)), resource.TestCheckResourceAttr(resourceName, "file_system_locations.0.mount_options", "nfsvers=4.1,rsize=1048576,wsize=1048576,hard,timeo=450,retrans=3"), resource.TestCheckResourceAttr(resourceName, "file_system_locations.0.mount_point", 
"/mount2"), - resource.TestCheckResourceAttr(resourceName, "file_system_locations.0.type", codebuild.FileSystemTypeEfs), + resource.TestCheckResourceAttr(resourceName, "file_system_locations.0.type", string(types.FileSystemTypeEfs)), ), }, }, @@ -435,13 +436,13 @@ func TestAccCodeBuildProject_fileSystemLocations(t *testing.T) { func TestAccCodeBuildProject_sourceVersion(t *testing.T) { ctx := acctest.Context(t) - var project codebuild.Project + var project types.Project rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resourceName := "aws_codebuild_project.test" resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, codebuild.EndpointsID), + ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), Steps: []resource.TestStep{ @@ -458,13 +459,13 @@ func TestAccCodeBuildProject_sourceVersion(t *testing.T) { func TestAccCodeBuildProject_encryptionKey(t *testing.T) { ctx := acctest.Context(t) - var project codebuild.Project + var project types.Project rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resourceName := "aws_codebuild_project.test" resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, codebuild.EndpointsID), + ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), Steps: []resource.TestStep{ @@ -486,13 +487,13 @@ func TestAccCodeBuildProject_encryptionKey(t *testing.T) { func TestAccCodeBuildProject_Environment_environmentVariable(t *testing.T) { ctx := acctest.Context(t) - var project1, project2, project3 codebuild.Project + var project1, project2, project3 types.Project rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resourceName := "aws_codebuild_project.test" resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, codebuild.EndpointsID), + ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), Steps: []resource.TestStep{ @@ -536,22 +537,22 @@ func TestAccCodeBuildProject_Environment_environmentVariable(t *testing.T) { func TestAccCodeBuildProject_EnvironmentEnvironmentVariable_type(t *testing.T) { ctx := acctest.Context(t) - var project codebuild.Project + var project types.Project rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resourceName := "aws_codebuild_project.test" resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, codebuild.EndpointsID), + ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), Steps: []resource.TestStep{ { - Config: testAccProjectConfig_environmentVariableType(rName, codebuild.EnvironmentVariableTypePlaintext), + Config: testAccProjectConfig_environmentVariableType(rName, string(types.EnvironmentVariableTypePlaintext)), Check: resource.ComposeTestCheckFunc( testAccCheckProjectExists(ctx, resourceName, &project), - resource.TestCheckResourceAttr(resourceName, 
"environment.0.environment_variable.0.type", codebuild.EnvironmentVariableTypePlaintext), - resource.TestCheckResourceAttr(resourceName, "environment.0.environment_variable.1.type", codebuild.EnvironmentVariableTypePlaintext), + resource.TestCheckResourceAttr(resourceName, "environment.0.environment_variable.0.type", string(types.EnvironmentVariableTypePlaintext)), + resource.TestCheckResourceAttr(resourceName, "environment.0.environment_variable.1.type", string(types.EnvironmentVariableTypePlaintext)), ), }, { @@ -560,19 +561,19 @@ func TestAccCodeBuildProject_EnvironmentEnvironmentVariable_type(t *testing.T) { ImportStateVerify: true, }, { - Config: testAccProjectConfig_environmentVariableType(rName, codebuild.EnvironmentVariableTypeParameterStore), + Config: testAccProjectConfig_environmentVariableType(rName, string(types.EnvironmentVariableTypeParameterStore)), Check: resource.ComposeTestCheckFunc( testAccCheckProjectExists(ctx, resourceName, &project), - resource.TestCheckResourceAttr(resourceName, "environment.0.environment_variable.0.type", codebuild.EnvironmentVariableTypePlaintext), - resource.TestCheckResourceAttr(resourceName, "environment.0.environment_variable.1.type", codebuild.EnvironmentVariableTypeParameterStore), + resource.TestCheckResourceAttr(resourceName, "environment.0.environment_variable.0.type", string(types.EnvironmentVariableTypePlaintext)), + resource.TestCheckResourceAttr(resourceName, "environment.0.environment_variable.1.type", string(types.EnvironmentVariableTypeParameterStore)), ), }, { - Config: testAccProjectConfig_environmentVariableType(rName, codebuild.EnvironmentVariableTypeSecretsManager), + Config: testAccProjectConfig_environmentVariableType(rName, string(types.EnvironmentVariableTypeSecretsManager)), Check: resource.ComposeTestCheckFunc( testAccCheckProjectExists(ctx, resourceName, &project), - resource.TestCheckResourceAttr(resourceName, "environment.0.environment_variable.0.type", codebuild.EnvironmentVariableTypePlaintext), - resource.TestCheckResourceAttr(resourceName, "environment.0.environment_variable.1.type", codebuild.EnvironmentVariableTypeSecretsManager), + resource.TestCheckResourceAttr(resourceName, "environment.0.environment_variable.0.type", string(types.EnvironmentVariableTypePlaintext)), + resource.TestCheckResourceAttr(resourceName, "environment.0.environment_variable.1.type", string(types.EnvironmentVariableTypeSecretsManager)), ), }, }, @@ -581,13 +582,13 @@ func TestAccCodeBuildProject_EnvironmentEnvironmentVariable_type(t *testing.T) { func TestAccCodeBuildProject_EnvironmentEnvironmentVariable_value(t *testing.T) { ctx := acctest.Context(t) - var project1, project2, project3 codebuild.Project + var project1, project2, project3 types.Project rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resourceName := "aws_codebuild_project.test" resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, codebuild.EndpointsID), + ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), Steps: []resource.TestStep{ @@ -630,14 +631,14 @@ func TestAccCodeBuildProject_EnvironmentEnvironmentVariable_value(t *testing.T) func TestAccCodeBuildProject_Environment_certificate(t *testing.T) { ctx := acctest.Context(t) - var project codebuild.Project + var project types.Project rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) 
oName := "certificate.pem" resourceName := "aws_codebuild_project.test" resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, codebuild.EndpointsID), + ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), Steps: []resource.TestStep{ @@ -659,39 +660,39 @@ func TestAccCodeBuildProject_Environment_certificate(t *testing.T) { func TestAccCodeBuildProject_Logs_cloudWatchLogs(t *testing.T) { ctx := acctest.Context(t) - var project codebuild.Project + var project types.Project rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resourceName := "aws_codebuild_project.test" resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, codebuild.EndpointsID), + ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), Steps: []resource.TestStep{ { - Config: testAccProjectConfig_cloudWatchLogs(rName, codebuild.LogsConfigStatusTypeEnabled, "group-name", ""), + Config: testAccProjectConfig_cloudWatchLogs(rName, string(types.LogsConfigStatusTypeEnabled), "group-name", ""), Check: resource.ComposeTestCheckFunc( testAccCheckProjectExists(ctx, resourceName, &project), - resource.TestCheckResourceAttr(resourceName, "logs_config.0.cloudwatch_logs.0.status", codebuild.LogsConfigStatusTypeEnabled), + resource.TestCheckResourceAttr(resourceName, "logs_config.0.cloudwatch_logs.0.status", string(types.LogsConfigStatusTypeEnabled)), resource.TestCheckResourceAttr(resourceName, "logs_config.0.cloudwatch_logs.0.group_name", "group-name"), resource.TestCheckResourceAttr(resourceName, "logs_config.0.cloudwatch_logs.0.stream_name", ""), ), }, { - Config: testAccProjectConfig_cloudWatchLogs(rName, codebuild.LogsConfigStatusTypeEnabled, "group-name", "stream-name"), + Config: testAccProjectConfig_cloudWatchLogs(rName, string(types.LogsConfigStatusTypeEnabled), "group-name", "stream-name"), Check: resource.ComposeTestCheckFunc( testAccCheckProjectExists(ctx, resourceName, &project), - resource.TestCheckResourceAttr(resourceName, "logs_config.0.cloudwatch_logs.0.status", codebuild.LogsConfigStatusTypeEnabled), + resource.TestCheckResourceAttr(resourceName, "logs_config.0.cloudwatch_logs.0.status", string(types.LogsConfigStatusTypeEnabled)), resource.TestCheckResourceAttr(resourceName, "logs_config.0.cloudwatch_logs.0.group_name", "group-name"), resource.TestCheckResourceAttr(resourceName, "logs_config.0.cloudwatch_logs.0.stream_name", "stream-name"), ), }, { - Config: testAccProjectConfig_cloudWatchLogs(rName, codebuild.LogsConfigStatusTypeDisabled, "", ""), + Config: testAccProjectConfig_cloudWatchLogs(rName, string(types.LogsConfigStatusTypeDisabled), "", ""), Check: resource.ComposeTestCheckFunc( testAccCheckProjectExists(ctx, resourceName, &project), - resource.TestCheckResourceAttr(resourceName, "logs_config.0.cloudwatch_logs.0.status", codebuild.LogsConfigStatusTypeDisabled), + resource.TestCheckResourceAttr(resourceName, "logs_config.0.cloudwatch_logs.0.status", string(types.LogsConfigStatusTypeDisabled)), ), }, { @@ -705,39 +706,39 @@ func TestAccCodeBuildProject_Logs_cloudWatchLogs(t *testing.T) { func TestAccCodeBuildProject_Logs_s3Logs(t *testing.T) { ctx := acctest.Context(t) - var project 
codebuild.Project + var project types.Project rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resourceName := "aws_codebuild_project.test" resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, codebuild.EndpointsID), + ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), Steps: []resource.TestStep{ { - Config: testAccProjectConfig_s3Logs(rName, codebuild.LogsConfigStatusTypeEnabled, rName+"/build-log", false), + Config: testAccProjectConfig_s3Logs(rName, string(types.LogsConfigStatusTypeEnabled), rName+"/build-log", false), Check: resource.ComposeTestCheckFunc( testAccCheckProjectExists(ctx, resourceName, &project), - resource.TestCheckResourceAttr(resourceName, "logs_config.0.s3_logs.0.status", codebuild.LogsConfigStatusTypeEnabled), + resource.TestCheckResourceAttr(resourceName, "logs_config.0.s3_logs.0.status", string(types.LogsConfigStatusTypeEnabled)), resource.TestCheckResourceAttr(resourceName, "logs_config.0.s3_logs.0.location", rName+"/build-log"), resource.TestCheckResourceAttr(resourceName, "logs_config.0.s3_logs.0.encryption_disabled", "false"), ), }, { - Config: testAccProjectConfig_s3Logs(rName, codebuild.LogsConfigStatusTypeEnabled, rName+"/build-log", true), + Config: testAccProjectConfig_s3Logs(rName, string(types.LogsConfigStatusTypeEnabled), rName+"/build-log", true), Check: resource.ComposeTestCheckFunc( testAccCheckProjectExists(ctx, resourceName, &project), - resource.TestCheckResourceAttr(resourceName, "logs_config.0.s3_logs.0.status", codebuild.LogsConfigStatusTypeEnabled), + resource.TestCheckResourceAttr(resourceName, "logs_config.0.s3_logs.0.status", string(types.LogsConfigStatusTypeEnabled)), resource.TestCheckResourceAttr(resourceName, "logs_config.0.s3_logs.0.location", rName+"/build-log"), resource.TestCheckResourceAttr(resourceName, "logs_config.0.s3_logs.0.encryption_disabled", "true"), ), }, { - Config: testAccProjectConfig_s3Logs(rName, codebuild.LogsConfigStatusTypeDisabled, "", false), + Config: testAccProjectConfig_s3Logs(rName, string(types.LogsConfigStatusTypeDisabled), "", false), Check: resource.ComposeTestCheckFunc( testAccCheckProjectExists(ctx, resourceName, &project), - resource.TestCheckResourceAttr(resourceName, "logs_config.0.s3_logs.0.status", codebuild.LogsConfigStatusTypeDisabled), + resource.TestCheckResourceAttr(resourceName, "logs_config.0.s3_logs.0.status", string(types.LogsConfigStatusTypeDisabled)), ), }, { @@ -751,7 +752,7 @@ func TestAccCodeBuildProject_Logs_s3Logs(t *testing.T) { func TestAccCodeBuildProject_buildBatch(t *testing.T) { ctx := acctest.Context(t) - var project codebuild.Project + var project types.Project rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resourceName := "aws_codebuild_project.test" @@ -761,7 +762,7 @@ func TestAccCodeBuildProject_buildBatch(t *testing.T) { resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, codebuild.EndpointsID), + ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), Steps: []resource.TestStep{ @@ -800,13 +801,13 @@ func TestAccCodeBuildProject_buildBatch(t *testing.T) { func TestAccCodeBuildProject_buildBatchConfigDelete(t *testing.T) 
{ ctx := acctest.Context(t) - var project codebuild.Project + var project types.Project rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resourceName := "aws_codebuild_project.test" resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, codebuild.EndpointsID), + ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), Steps: []resource.TestStep{ @@ -839,13 +840,13 @@ func TestAccCodeBuildProject_buildBatchConfigDelete(t *testing.T) { func TestAccCodeBuildProject_Source_gitCloneDepth(t *testing.T) { ctx := acctest.Context(t) - var project codebuild.Project + var project types.Project rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resourceName := "aws_codebuild_project.test" resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, codebuild.EndpointsID), + ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), Steps: []resource.TestStep{ @@ -874,13 +875,13 @@ func TestAccCodeBuildProject_Source_gitCloneDepth(t *testing.T) { func TestAccCodeBuildProject_SourceGitSubmodules_codeCommit(t *testing.T) { ctx := acctest.Context(t) - var project codebuild.Project + var project types.Project rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resourceName := "aws_codebuild_project.test" resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, codebuild.EndpointsID), + ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), Steps: []resource.TestStep{ @@ -911,13 +912,13 @@ func TestAccCodeBuildProject_SourceGitSubmodules_codeCommit(t *testing.T) { func TestAccCodeBuildProject_SourceGitSubmodules_gitHub(t *testing.T) { ctx := acctest.Context(t) - var project codebuild.Project + var project types.Project rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resourceName := "aws_codebuild_project.test" resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, codebuild.EndpointsID), + ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), Steps: []resource.TestStep{ @@ -944,13 +945,13 @@ func TestAccCodeBuildProject_SourceGitSubmodules_gitHub(t *testing.T) { func TestAccCodeBuildProject_SourceGitSubmodules_gitHubEnterprise(t *testing.T) { ctx := acctest.Context(t) - var project codebuild.Project + var project types.Project rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resourceName := "aws_codebuild_project.test" resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, codebuild.EndpointsID), + ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), Steps: []resource.TestStep{ @@ -977,13 +978,13 @@ func 
TestAccCodeBuildProject_SourceGitSubmodules_gitHubEnterprise(t *testing.T) func TestAccCodeBuildProject_SecondarySourcesGitSubmodules_codeCommit(t *testing.T) { ctx := acctest.Context(t) - var project codebuild.Project + var project types.Project rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resourceName := "aws_codebuild_project.test" resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, codebuild.EndpointsID), + ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), Steps: []resource.TestStep{ @@ -1039,13 +1040,13 @@ func TestAccCodeBuildProject_SecondarySourcesGitSubmodules_codeCommit(t *testing func TestAccCodeBuildProject_SecondarySourcesGitSubmodules_gitHub(t *testing.T) { ctx := acctest.Context(t) - var project codebuild.Project + var project types.Project rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resourceName := "aws_codebuild_project.test" resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, codebuild.EndpointsID), + ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), Steps: []resource.TestStep{ @@ -1072,13 +1073,13 @@ func TestAccCodeBuildProject_SecondarySourcesGitSubmodules_gitHub(t *testing.T) func TestAccCodeBuildProject_SecondarySourcesGitSubmodules_gitHubEnterprise(t *testing.T) { ctx := acctest.Context(t) - var project codebuild.Project + var project types.Project rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resourceName := "aws_codebuild_project.test" resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, codebuild.EndpointsID), + ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), Steps: []resource.TestStep{ @@ -1105,13 +1106,13 @@ func TestAccCodeBuildProject_SecondarySourcesGitSubmodules_gitHubEnterprise(t *t func TestAccCodeBuildProject_SecondarySourcesVersions(t *testing.T) { ctx := acctest.Context(t) - var project codebuild.Project + var project types.Project rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resourceName := "aws_codebuild_project.test" resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, codebuild.EndpointsID), + ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), Steps: []resource.TestStep{ @@ -1198,7 +1199,7 @@ func TestAccCodeBuildProject_SecondarySourcesVersions(t *testing.T) { func TestAccCodeBuildProject_SourceBuildStatus_gitHubEnterprise(t *testing.T) { ctx := acctest.Context(t) - var project codebuild.Project + var project types.Project rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resourceName := "aws_codebuild_project.test" @@ -1208,7 +1209,7 @@ func TestAccCodeBuildProject_SourceBuildStatus_gitHubEnterprise(t *testing.T) { resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t); 
testAccPreCheck(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, codebuild.EndpointsID), + ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), Steps: []resource.TestStep{ @@ -1229,13 +1230,13 @@ func TestAccCodeBuildProject_SourceBuildStatus_gitHubEnterprise(t *testing.T) { func TestAccCodeBuildProject_Source_insecureSSL(t *testing.T) { ctx := acctest.Context(t) - var project codebuild.Project + var project types.Project rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resourceName := "aws_codebuild_project.test" resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, codebuild.EndpointsID), + ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), Steps: []resource.TestStep{ @@ -1264,7 +1265,7 @@ func TestAccCodeBuildProject_Source_insecureSSL(t *testing.T) { func TestAccCodeBuildProject_SourceReportBuildStatus_bitbucket(t *testing.T) { ctx := acctest.Context(t) - var project codebuild.Project + var project types.Project rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resourceName := "aws_codebuild_project.test" @@ -1272,7 +1273,7 @@ func TestAccCodeBuildProject_SourceReportBuildStatus_bitbucket(t *testing.T) { resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, codebuild.EndpointsID), + ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), Steps: []resource.TestStep{ @@ -1301,13 +1302,13 @@ func TestAccCodeBuildProject_SourceReportBuildStatus_bitbucket(t *testing.T) { func TestAccCodeBuildProject_SourceReportBuildStatus_gitHub(t *testing.T) { ctx := acctest.Context(t) - var project codebuild.Project + var project types.Project rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resourceName := "aws_codebuild_project.test" resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, codebuild.EndpointsID), + ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), Steps: []resource.TestStep{ @@ -1336,13 +1337,13 @@ func TestAccCodeBuildProject_SourceReportBuildStatus_gitHub(t *testing.T) { func TestAccCodeBuildProject_SourceReportBuildStatus_gitHubEnterprise(t *testing.T) { ctx := acctest.Context(t) - var project codebuild.Project + var project types.Project rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resourceName := "aws_codebuild_project.test" resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, codebuild.EndpointsID), + ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), Steps: []resource.TestStep{ @@ -1371,7 +1372,7 @@ func TestAccCodeBuildProject_SourceReportBuildStatus_gitHubEnterprise(t *testing func TestAccCodeBuildProject_SourceType_bitbucket(t *testing.T) { ctx := acctest.Context(t) - var 
project codebuild.Project + var project types.Project rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resourceName := "aws_codebuild_project.test" @@ -1379,7 +1380,7 @@ func TestAccCodeBuildProject_SourceType_bitbucket(t *testing.T) { resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, codebuild.EndpointsID), + ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), Steps: []resource.TestStep{ @@ -1402,13 +1403,13 @@ func TestAccCodeBuildProject_SourceType_bitbucket(t *testing.T) { func TestAccCodeBuildProject_SourceType_codeCommit(t *testing.T) { ctx := acctest.Context(t) - var project codebuild.Project + var project types.Project rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resourceName := "aws_codebuild_project.test" resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, codebuild.EndpointsID), + ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), Steps: []resource.TestStep{ @@ -1430,13 +1431,13 @@ func TestAccCodeBuildProject_SourceType_codeCommit(t *testing.T) { func TestAccCodeBuildProject_SourceType_codePipeline(t *testing.T) { ctx := acctest.Context(t) - var project codebuild.Project + var project types.Project rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resourceName := "aws_codebuild_project.test" resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, codebuild.EndpointsID), + ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), Steps: []resource.TestStep{ @@ -1458,13 +1459,13 @@ func TestAccCodeBuildProject_SourceType_codePipeline(t *testing.T) { func TestAccCodeBuildProject_SourceType_gitHubEnterprise(t *testing.T) { ctx := acctest.Context(t) - var project codebuild.Project + var project types.Project rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resourceName := "aws_codebuild_project.test" resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, codebuild.EndpointsID), + ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), Steps: []resource.TestStep{ @@ -1486,13 +1487,13 @@ func TestAccCodeBuildProject_SourceType_gitHubEnterprise(t *testing.T) { func TestAccCodeBuildProject_SourceType_s3(t *testing.T) { ctx := acctest.Context(t) - var project codebuild.Project + var project types.Project rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resourceName := "aws_codebuild_project.test" resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, codebuild.EndpointsID), + ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), Steps: []resource.TestStep{ @@ -1513,7 +1514,7 @@ 
func TestAccCodeBuildProject_SourceType_s3(t *testing.T) { func TestAccCodeBuildProject_SourceType_noSource(t *testing.T) { ctx := acctest.Context(t) - var project codebuild.Project + var project types.Project rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resourceName := "aws_codebuild_project.test" rBuildspec := ` @@ -1526,7 +1527,7 @@ phases: resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, codebuild.EndpointsID), + ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), Steps: []resource.TestStep{ @@ -1559,7 +1560,7 @@ phases: resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, codebuild.EndpointsID), + ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), Steps: []resource.TestStep{ @@ -1577,13 +1578,13 @@ phases: func TestAccCodeBuildProject_tags(t *testing.T) { ctx := acctest.Context(t) - var project codebuild.Project + var project types.Project rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resourceName := "aws_codebuild_project.test" resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, codebuild.EndpointsID), + ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), Steps: []resource.TestStep{ @@ -1616,13 +1617,13 @@ func TestAccCodeBuildProject_tags(t *testing.T) { func TestAccCodeBuildProject_vpc(t *testing.T) { ctx := acctest.Context(t) - var project codebuild.Project + var project types.Project rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resourceName := "aws_codebuild_project.test" resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, codebuild.EndpointsID), + ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), Steps: []resource.TestStep{ @@ -1664,13 +1665,13 @@ func TestAccCodeBuildProject_vpc(t *testing.T) { func TestAccCodeBuildProject_windowsServer2019Container(t *testing.T) { ctx := acctest.Context(t) - var project codebuild.Project + var project types.Project rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resourceName := "aws_codebuild_project.test" resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, codebuild.EndpointsID), + ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), Steps: []resource.TestStep{ @@ -1679,12 +1680,12 @@ func TestAccCodeBuildProject_windowsServer2019Container(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckProjectExists(ctx, resourceName, &project), resource.TestCheckResourceAttr(resourceName, "environment.#", "1"), - resource.TestCheckResourceAttr(resourceName, "environment.0.compute_type", 
codebuild.ComputeTypeBuildGeneral1Medium), + resource.TestCheckResourceAttr(resourceName, "environment.0.compute_type", string(types.ComputeTypeBuildGeneral1Medium)), resource.TestCheckResourceAttr(resourceName, "environment.0.environment_variable.#", "0"), resource.TestCheckResourceAttr(resourceName, "environment.0.image", "2"), resource.TestCheckResourceAttr(resourceName, "environment.0.privileged_mode", "false"), - resource.TestCheckResourceAttr(resourceName, "environment.0.image_pull_credentials_type", codebuild.ImagePullCredentialsTypeCodebuild), - resource.TestCheckResourceAttr(resourceName, "environment.0.type", codebuild.EnvironmentTypeWindowsServer2019Container), + resource.TestCheckResourceAttr(resourceName, "environment.0.image_pull_credentials_type", string(types.ImagePullCredentialsTypeCodebuild)), + resource.TestCheckResourceAttr(resourceName, "environment.0.type", string(types.EnvironmentTypeWindowsServer2019Container)), ), }, { @@ -1698,13 +1699,13 @@ func TestAccCodeBuildProject_windowsServer2019Container(t *testing.T) { func TestAccCodeBuildProject_armContainer(t *testing.T) { ctx := acctest.Context(t) - var project codebuild.Project + var project types.Project rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resourceName := "aws_codebuild_project.test" resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, codebuild.EndpointsID), + ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), Steps: []resource.TestStep{ @@ -1725,13 +1726,13 @@ func TestAccCodeBuildProject_armContainer(t *testing.T) { func TestAccCodeBuildProject_linuxLambdaContainer(t *testing.T) { ctx := acctest.Context(t) - var project codebuild.Project + var project types.Project rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resourceName := "aws_codebuild_project.test" resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, codebuild.EndpointsID), + ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), Steps: []resource.TestStep{ @@ -1740,12 +1741,12 @@ func TestAccCodeBuildProject_linuxLambdaContainer(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckProjectExists(ctx, resourceName, &project), resource.TestCheckResourceAttr(resourceName, "environment.#", "1"), - resource.TestCheckResourceAttr(resourceName, "environment.0.compute_type", codebuild.ComputeTypeBuildLambda1gb), + resource.TestCheckResourceAttr(resourceName, "environment.0.compute_type", string(types.ComputeTypeBuildLambda1gb)), resource.TestCheckResourceAttr(resourceName, "environment.0.environment_variable.#", "0"), resource.TestCheckResourceAttr(resourceName, "environment.0.image", "aws/codebuild/amazonlinux-x86_64-lambda-standard:go1.21"), resource.TestCheckResourceAttr(resourceName, "environment.0.privileged_mode", "false"), - resource.TestCheckResourceAttr(resourceName, "environment.0.image_pull_credentials_type", codebuild.ImagePullCredentialsTypeCodebuild), - resource.TestCheckResourceAttr(resourceName, "environment.0.type", codebuild.EnvironmentTypeLinuxLambdaContainer), + resource.TestCheckResourceAttr(resourceName, "environment.0.image_pull_credentials_type", 
string(types.ImagePullCredentialsTypeCodebuild)), + resource.TestCheckResourceAttr(resourceName, "environment.0.type", string(types.EnvironmentTypeLinuxLambdaContainer)), ), }, { @@ -1759,7 +1760,7 @@ func TestAccCodeBuildProject_linuxLambdaContainer(t *testing.T) { func TestAccCodeBuildProject_Artifacts_artifactIdentifier(t *testing.T) { ctx := acctest.Context(t) - var project codebuild.Project + var project types.Project rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resourceName := "aws_codebuild_project.test" @@ -1768,7 +1769,7 @@ func TestAccCodeBuildProject_Artifacts_artifactIdentifier(t *testing.T) { resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, codebuild.EndpointsID), + ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), Steps: []resource.TestStep{ @@ -1799,13 +1800,13 @@ func TestAccCodeBuildProject_Artifacts_artifactIdentifier(t *testing.T) { func TestAccCodeBuildProject_Artifacts_encryptionDisabled(t *testing.T) { ctx := acctest.Context(t) - var project codebuild.Project + var project types.Project rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resourceName := "aws_codebuild_project.test" resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, codebuild.EndpointsID), + ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), Steps: []resource.TestStep{ @@ -1836,14 +1837,14 @@ func TestAccCodeBuildProject_Artifacts_encryptionDisabled(t *testing.T) { func TestAccCodeBuildProject_Artifacts_location(t *testing.T) { ctx := acctest.Context(t) - var project codebuild.Project + var project types.Project rName1 := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) rName2 := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resourceName := "aws_codebuild_project.test" resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, codebuild.EndpointsID), + ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), Steps: []resource.TestStep{ @@ -1874,7 +1875,7 @@ func TestAccCodeBuildProject_Artifacts_location(t *testing.T) { func TestAccCodeBuildProject_Artifacts_name(t *testing.T) { ctx := acctest.Context(t) - var project codebuild.Project + var project types.Project rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resourceName := "aws_codebuild_project.test" @@ -1883,7 +1884,7 @@ func TestAccCodeBuildProject_Artifacts_name(t *testing.T) { resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, codebuild.EndpointsID), + ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), Steps: []resource.TestStep{ @@ -1914,22 +1915,22 @@ func TestAccCodeBuildProject_Artifacts_name(t *testing.T) { func TestAccCodeBuildProject_Artifacts_namespaceType(t *testing.T) { ctx := acctest.Context(t) - var project codebuild.Project + var project 
types.Project rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resourceName := "aws_codebuild_project.test" resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, codebuild.EndpointsID), + ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), Steps: []resource.TestStep{ { - Config: testAccProjectConfig_artifactsNamespaceType(rName, codebuild.ArtifactNamespaceBuildId), + Config: testAccProjectConfig_artifactsNamespaceType(rName, string(types.ArtifactNamespaceBuildId)), Check: resource.ComposeTestCheckFunc( testAccCheckProjectExists(ctx, resourceName, &project), resource.TestCheckResourceAttr(resourceName, "artifacts.#", "1"), - resource.TestCheckResourceAttr(resourceName, "artifacts.0.namespace_type", codebuild.ArtifactNamespaceBuildId), + resource.TestCheckResourceAttr(resourceName, "artifacts.0.namespace_type", string(types.ArtifactNamespaceBuildId)), ), }, { @@ -1938,11 +1939,11 @@ func TestAccCodeBuildProject_Artifacts_namespaceType(t *testing.T) { ImportStateVerify: true, }, { - Config: testAccProjectConfig_artifactsNamespaceType(rName, codebuild.ArtifactNamespaceNone), + Config: testAccProjectConfig_artifactsNamespaceType(rName, string(types.ArtifactNamespaceNone)), Check: resource.ComposeTestCheckFunc( testAccCheckProjectExists(ctx, resourceName, &project), resource.TestCheckResourceAttr(resourceName, "artifacts.#", "1"), - resource.TestCheckResourceAttr(resourceName, "artifacts.0.namespace_type", codebuild.ArtifactNamespaceNone), + resource.TestCheckResourceAttr(resourceName, "artifacts.0.namespace_type", string(types.ArtifactNamespaceNone)), ), }, }, @@ -1951,13 +1952,13 @@ func TestAccCodeBuildProject_Artifacts_namespaceType(t *testing.T) { func TestAccCodeBuildProject_Artifacts_overrideArtifactName(t *testing.T) { ctx := acctest.Context(t) - var project codebuild.Project + var project types.Project rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resourceName := "aws_codebuild_project.test" resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, codebuild.EndpointsID), + ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), Steps: []resource.TestStep{ @@ -1988,22 +1989,22 @@ func TestAccCodeBuildProject_Artifacts_overrideArtifactName(t *testing.T) { func TestAccCodeBuildProject_Artifacts_packaging(t *testing.T) { ctx := acctest.Context(t) - var project codebuild.Project + var project types.Project rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resourceName := "aws_codebuild_project.test" resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, codebuild.EndpointsID), + ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), Steps: []resource.TestStep{ { - Config: testAccProjectConfig_artifactsPackaging(rName, codebuild.ArtifactPackagingZip), + Config: testAccProjectConfig_artifactsPackaging(rName, string(types.ArtifactPackagingZip)), Check: resource.ComposeTestCheckFunc( testAccCheckProjectExists(ctx, resourceName, 
&project), resource.TestCheckResourceAttr(resourceName, "artifacts.#", "1"), - resource.TestCheckResourceAttr(resourceName, "artifacts.0.packaging", codebuild.ArtifactPackagingZip), + resource.TestCheckResourceAttr(resourceName, "artifacts.0.packaging", string(types.ArtifactPackagingZip)), ), }, { @@ -2012,11 +2013,11 @@ func TestAccCodeBuildProject_Artifacts_packaging(t *testing.T) { ImportStateVerify: true, }, { - Config: testAccProjectConfig_artifactsPackaging(rName, codebuild.ArtifactPackagingNone), + Config: testAccProjectConfig_artifactsPackaging(rName, string(types.ArtifactPackagingNone)), Check: resource.ComposeTestCheckFunc( testAccCheckProjectExists(ctx, resourceName, &project), resource.TestCheckResourceAttr(resourceName, "artifacts.#", "1"), - resource.TestCheckResourceAttr(resourceName, "artifacts.0.packaging", codebuild.ArtifactPackagingNone), + resource.TestCheckResourceAttr(resourceName, "artifacts.0.packaging", string(types.ArtifactPackagingNone)), ), }, }, @@ -2025,13 +2026,13 @@ func TestAccCodeBuildProject_Artifacts_packaging(t *testing.T) { func TestAccCodeBuildProject_Artifacts_path(t *testing.T) { ctx := acctest.Context(t) - var project codebuild.Project + var project types.Project rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resourceName := "aws_codebuild_project.test" resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, codebuild.EndpointsID), + ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), Steps: []resource.TestStep{ @@ -2062,16 +2063,16 @@ func TestAccCodeBuildProject_Artifacts_path(t *testing.T) { func TestAccCodeBuildProject_Artifacts_type(t *testing.T) { ctx := acctest.Context(t) - var project codebuild.Project + var project types.Project rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resourceName := "aws_codebuild_project.test" - type1 := codebuild.ArtifactsTypeS3 - type2 := codebuild.ArtifactsTypeCodepipeline + type1 := string(types.ArtifactsTypeS3) + type2 := string(types.ArtifactsTypeCodepipeline) resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, codebuild.EndpointsID), + ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), Steps: []resource.TestStep{ @@ -2102,13 +2103,13 @@ func TestAccCodeBuildProject_Artifacts_type(t *testing.T) { func TestAccCodeBuildProject_Artifacts_bucketOwnerAccess(t *testing.T) { ctx := acctest.Context(t) - var project codebuild.Project + var project types.Project rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resourceName := "aws_codebuild_project.test" resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, codebuild.EndpointsID), + ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), Steps: []resource.TestStep{ @@ -2139,13 +2140,13 @@ func TestAccCodeBuildProject_Artifacts_bucketOwnerAccess(t *testing.T) { func TestAccCodeBuildProject_secondaryArtifacts(t *testing.T) { ctx := acctest.Context(t) - var project codebuild.Project + var project 
types.Project rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resourceName := "aws_codebuild_project.test" resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, codebuild.EndpointsID), + ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), Steps: []resource.TestStep{ @@ -2174,7 +2175,7 @@ func TestAccCodeBuildProject_secondaryArtifacts(t *testing.T) { func TestAccCodeBuildProject_SecondaryArtifacts_artifactIdentifier(t *testing.T) { ctx := acctest.Context(t) - var project codebuild.Project + var project types.Project rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resourceName := "aws_codebuild_project.test" @@ -2183,7 +2184,7 @@ func TestAccCodeBuildProject_SecondaryArtifacts_artifactIdentifier(t *testing.T) resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, codebuild.EndpointsID), + ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), Steps: []resource.TestStep{ @@ -2218,13 +2219,13 @@ func TestAccCodeBuildProject_SecondaryArtifacts_artifactIdentifier(t *testing.T) func TestAccCodeBuildProject_SecondaryArtifacts_overrideArtifactName(t *testing.T) { ctx := acctest.Context(t) - var project codebuild.Project + var project types.Project rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resourceName := "aws_codebuild_project.test" resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, codebuild.EndpointsID), + ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), Steps: []resource.TestStep{ @@ -2259,13 +2260,13 @@ func TestAccCodeBuildProject_SecondaryArtifacts_overrideArtifactName(t *testing. 
func TestAccCodeBuildProject_SecondaryArtifacts_encryptionDisabled(t *testing.T) { ctx := acctest.Context(t) - var project codebuild.Project + var project types.Project rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resourceName := "aws_codebuild_project.test" resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, codebuild.EndpointsID), + ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), Steps: []resource.TestStep{ @@ -2300,14 +2301,14 @@ func TestAccCodeBuildProject_SecondaryArtifacts_encryptionDisabled(t *testing.T) func TestAccCodeBuildProject_SecondaryArtifacts_location(t *testing.T) { ctx := acctest.Context(t) - var project codebuild.Project + var project types.Project rName1 := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) rName2 := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resourceName := "aws_codebuild_project.test" resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, codebuild.EndpointsID), + ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), Steps: []resource.TestStep{ @@ -2345,7 +2346,7 @@ func TestAccCodeBuildProject_SecondaryArtifacts_name(t *testing.T) { ctx := acctest.Context(t) - var project codebuild.Project + var project types.Project rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resourceName := "aws_codebuild_project.test" @@ -2354,7 +2355,7 @@ func TestAccCodeBuildProject_SecondaryArtifacts_name(t *testing.T) { resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, codebuild.EndpointsID), + ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), Steps: []resource.TestStep{ @@ -2389,23 +2390,23 @@ func TestAccCodeBuildProject_SecondaryArtifacts_name(t *testing.T) { func TestAccCodeBuildProject_SecondaryArtifacts_namespaceType(t *testing.T) { ctx := acctest.Context(t) - var project codebuild.Project + var project types.Project rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resourceName := "aws_codebuild_project.test" resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, codebuild.EndpointsID), + ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), Steps: []resource.TestStep{ { - Config: testAccProjectConfig_secondaryArtifactsNamespaceType(rName, codebuild.ArtifactNamespaceBuildId), + Config: testAccProjectConfig_secondaryArtifactsNamespaceType(rName, string(types.ArtifactNamespaceBuildId)), Check: resource.ComposeTestCheckFunc( testAccCheckProjectExists(ctx, resourceName, &project), resource.TestCheckResourceAttr(resourceName, "secondary_artifacts.#", "1"), resource.TestCheckTypeSetElemNestedAttrs(resourceName, "secondary_artifacts.*", map[string]string{ - "namespace_type": codebuild.ArtifactNamespaceBuildId, + "namespace_type": string(types.ArtifactNamespaceBuildId), }), 
), }, @@ -2415,12 +2416,12 @@ func TestAccCodeBuildProject_SecondaryArtifacts_namespaceType(t *testing.T) { ImportStateVerify: true, }, { - Config: testAccProjectConfig_secondaryArtifactsNamespaceType(rName, codebuild.ArtifactNamespaceNone), + Config: testAccProjectConfig_secondaryArtifactsNamespaceType(rName, string(types.ArtifactNamespaceNone)), Check: resource.ComposeTestCheckFunc( testAccCheckProjectExists(ctx, resourceName, &project), resource.TestCheckResourceAttr(resourceName, "secondary_artifacts.#", "1"), resource.TestCheckTypeSetElemNestedAttrs(resourceName, "secondary_artifacts.*", map[string]string{ - "namespace_type": codebuild.ArtifactNamespaceNone, + "namespace_type": string(types.ArtifactNamespaceNone), }), ), }, @@ -2430,23 +2431,23 @@ func TestAccCodeBuildProject_SecondaryArtifacts_namespaceType(t *testing.T) { func TestAccCodeBuildProject_SecondaryArtifacts_packaging(t *testing.T) { ctx := acctest.Context(t) - var project codebuild.Project + var project types.Project rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resourceName := "aws_codebuild_project.test" resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, codebuild.EndpointsID), + ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), Steps: []resource.TestStep{ { - Config: testAccProjectConfig_secondaryArtifactsPackaging(rName, codebuild.ArtifactPackagingZip), + Config: testAccProjectConfig_secondaryArtifactsPackaging(rName, string(types.ArtifactPackagingZip)), Check: resource.ComposeTestCheckFunc( testAccCheckProjectExists(ctx, resourceName, &project), resource.TestCheckResourceAttr(resourceName, "secondary_artifacts.#", "1"), resource.TestCheckTypeSetElemNestedAttrs(resourceName, "secondary_artifacts.*", map[string]string{ - "packaging": codebuild.ArtifactPackagingZip, + "packaging": string(types.ArtifactPackagingZip), }), ), }, @@ -2456,12 +2457,12 @@ func TestAccCodeBuildProject_SecondaryArtifacts_packaging(t *testing.T) { ImportStateVerify: true, }, { - Config: testAccProjectConfig_secondaryArtifactsPackaging(rName, codebuild.ArtifactPackagingNone), + Config: testAccProjectConfig_secondaryArtifactsPackaging(rName, string(types.ArtifactPackagingNone)), Check: resource.ComposeTestCheckFunc( testAccCheckProjectExists(ctx, resourceName, &project), resource.TestCheckResourceAttr(resourceName, "secondary_artifacts.#", "1"), resource.TestCheckTypeSetElemNestedAttrs(resourceName, "secondary_artifacts.*", map[string]string{ - "packaging": codebuild.ArtifactPackagingNone, + "packaging": string(types.ArtifactPackagingNone), }), ), }, @@ -2471,7 +2472,7 @@ func TestAccCodeBuildProject_SecondaryArtifacts_packaging(t *testing.T) { func TestAccCodeBuildProject_SecondaryArtifacts_path(t *testing.T) { ctx := acctest.Context(t) - var project codebuild.Project + var project types.Project rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resourceName := "aws_codebuild_project.test" @@ -2480,7 +2481,7 @@ func TestAccCodeBuildProject_SecondaryArtifacts_path(t *testing.T) { resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, codebuild.EndpointsID), + ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: 
testAccCheckProjectDestroy(ctx), Steps: []resource.TestStep{ @@ -2515,23 +2516,23 @@ func TestAccCodeBuildProject_SecondaryArtifacts_path(t *testing.T) { func TestAccCodeBuildProject_SecondaryArtifacts_type(t *testing.T) { ctx := acctest.Context(t) - var project codebuild.Project + var project types.Project rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resourceName := "aws_codebuild_project.test" resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, codebuild.EndpointsID), + ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), Steps: []resource.TestStep{ { - Config: testAccProjectConfig_secondaryArtifactsType(rName, codebuild.ArtifactsTypeS3), + Config: testAccProjectConfig_secondaryArtifactsType(rName, string(types.ArtifactsTypeS3)), Check: resource.ComposeTestCheckFunc( testAccCheckProjectExists(ctx, resourceName, &project), resource.TestCheckResourceAttr(resourceName, "secondary_artifacts.#", "1"), resource.TestCheckTypeSetElemNestedAttrs(resourceName, "secondary_artifacts.*", map[string]string{ - "type": codebuild.ArtifactsTypeS3, + "type": string(types.ArtifactsTypeS3), }), ), }, @@ -2546,13 +2547,13 @@ func TestAccCodeBuildProject_SecondaryArtifacts_type(t *testing.T) { func TestAccCodeBuildProject_SecondarySources_codeCommit(t *testing.T) { ctx := acctest.Context(t) - var project codebuild.Project + var project types.Project rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resourceName := "aws_codebuild_project.test" resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, codebuild.EndpointsID), + ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), Steps: []resource.TestStep{ @@ -2603,14 +2604,14 @@ func TestProject_nameValidation(t *testing.T) { func TestAccCodeBuildProject_concurrentBuildLimit(t *testing.T) { ctx := acctest.Context(t) - var project codebuild.Project + var project types.Project rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resourceName := "aws_codebuild_project.test" resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, - ErrorCheck: acctest.ErrorCheck(t, codebuild.EndpointsID), + ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), CheckDestroy: testAccCheckProjectDestroy(ctx), Steps: []resource.TestStep{ { @@ -2638,13 +2639,13 @@ func TestAccCodeBuildProject_concurrentBuildLimit(t *testing.T) { func TestAccCodeBuildProject_Environment_registryCredential(t *testing.T) { ctx := acctest.Context(t) - var project codebuild.Project + var project types.Project rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resourceName := "aws_codebuild_project.test" resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, codebuild.EndpointsID), + ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), Steps: []resource.TestStep{ @@ -2671,14 +2672,14 @@ func 
TestAccCodeBuildProject_Environment_registryCredential(t *testing.T) { func TestAccCodeBuildProject_disappears(t *testing.T) { ctx := acctest.Context(t) - var project codebuild.Project + var project types.Project rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resourceName := "aws_codebuild_project.test" resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, codebuild.EndpointsID), + ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), Steps: []resource.TestStep{ @@ -2687,7 +2688,6 @@ func TestAccCodeBuildProject_disappears(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckProjectExists(ctx, resourceName, &project), acctest.CheckResourceDisappears(ctx, acctest.Provider, tfcodebuild.ResourceProject(), resourceName), - acctest.CheckResourceDisappears(ctx, acctest.Provider, tfcodebuild.ResourceProject(), resourceName), ), ExpectNonEmptyPlan: true, }, @@ -2695,29 +2695,22 @@ func TestAccCodeBuildProject_disappears(t *testing.T) { }) } -func testAccCheckProjectExists(ctx context.Context, n string, project *codebuild.Project) resource.TestCheckFunc { +func testAccCheckProjectExists(ctx context.Context, n string, v *types.Project) resource.TestCheckFunc { return func(s *terraform.State) error { rs, ok := s.RootModule().Resources[n] if !ok { return fmt.Errorf("Not found: %s", n) } - if rs.Primary.ID == "" { - return fmt.Errorf("No CodeBuild Project ID is set") - } + conn := acctest.Provider.Meta().(*conns.AWSClient).CodeBuildClient(ctx) - conn := acctest.Provider.Meta().(*conns.AWSClient).CodeBuildConn(ctx) + output, err := tfcodebuild.FindProjectByNameOrARN(ctx, conn, rs.Primary.ID) - output, err := tfcodebuild.FindProjectByARN(ctx, conn, rs.Primary.ID) if err != nil { return err } - if output == nil { - return fmt.Errorf("CodeBuild Project (%s) not found", rs.Primary.ID) - } - - *project = *output + *v = *output return nil } @@ -2725,14 +2718,14 @@ func testAccCheckProjectExists(ctx context.Context, n string, project *codebuild func testAccCheckProjectDestroy(ctx context.Context) resource.TestCheckFunc { return func(s *terraform.State) error { - conn := acctest.Provider.Meta().(*conns.AWSClient).CodeBuildConn(ctx) + conn := acctest.Provider.Meta().(*conns.AWSClient).CodeBuildClient(ctx) for _, rs := range s.RootModule().Resources { if rs.Type != "aws_codebuild_project" { continue } - _, err := tfcodebuild.FindProjectByARN(ctx, conn, rs.Primary.ID) + _, err := tfcodebuild.FindProjectByNameOrARN(ctx, conn, rs.Primary.ID) if tfresource.NotFound(err) { continue @@ -2749,19 +2742,19 @@ func testAccCheckProjectDestroy(ctx context.Context) resource.TestCheckFunc { } } -func testAccCheckProjectCertificate(project *codebuild.Project, expectedCertificate string) resource.TestCheckFunc { +func testAccCheckProjectCertificate(project *types.Project, expectedCertificate string) resource.TestCheckFunc { return func(s *terraform.State) error { - if aws.StringValue(project.Environment.Certificate) != expectedCertificate { - return fmt.Errorf("CodeBuild Project certificate (%s) did not match: %s", aws.StringValue(project.Environment.Certificate), expectedCertificate) + if aws.ToString(project.Environment.Certificate) != expectedCertificate { + return fmt.Errorf("CodeBuild Project certificate (%s) did not match: %s", aws.ToString(project.Environment.Certificate), 
expectedCertificate) } return nil } } func testAccPreCheck(ctx context.Context, t *testing.T) { - conn := acctest.Provider.Meta().(*conns.AWSClient).CodeBuildConn(ctx) + conn := acctest.Provider.Meta().(*conns.AWSClient).CodeBuildClient(ctx) - _, err := tfcodebuild.FindProjectByARN(ctx, conn, "tf-acc-test-precheck") + _, err := tfcodebuild.FindProjectByNameOrARN(ctx, conn, "tf-acc-test-precheck") if acctest.PreCheckSkipError(err) { t.Skipf("skipping acceptance testing: %s", err) diff --git a/internal/service/codebuild/report_group_test.go b/internal/service/codebuild/report_group_test.go index 49b50bb7cf0..1e603ac36b6 100644 --- a/internal/service/codebuild/report_group_test.go +++ b/internal/service/codebuild/report_group_test.go @@ -8,24 +8,27 @@ import ( "fmt" "testing" - "github.com/aws/aws-sdk-go/service/codebuild" + "github.com/aws/aws-sdk-go-v2/service/codebuild" + "github.com/aws/aws-sdk-go-v2/service/codebuild/types" sdkacctest "github.com/hashicorp/terraform-plugin-testing/helper/acctest" "github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/hashicorp/terraform-plugin-testing/terraform" "github.com/hashicorp/terraform-provider-aws/internal/acctest" "github.com/hashicorp/terraform-provider-aws/internal/conns" tfcodebuild "github.com/hashicorp/terraform-provider-aws/internal/service/codebuild" + "github.com/hashicorp/terraform-provider-aws/internal/tfresource" + "github.com/hashicorp/terraform-provider-aws/names" ) func TestAccCodeBuildReportGroup_basic(t *testing.T) { ctx := acctest.Context(t) - var reportGroup codebuild.ReportGroup + var reportGroup types.ReportGroup rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resourceName := "aws_codebuild_report_group.test" resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheckReportGroup(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, codebuild.EndpointsID), + ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckReportGroupDestroy(ctx), Steps: []resource.TestStep{ @@ -52,13 +55,13 @@ func TestAccCodeBuildReportGroup_basic(t *testing.T) { func TestAccCodeBuildReportGroup_Export_s3(t *testing.T) { ctx := acctest.Context(t) - var reportGroup codebuild.ReportGroup + var reportGroup types.ReportGroup rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resourceName := "aws_codebuild_report_group.test" resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheckReportGroup(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, codebuild.EndpointsID), + ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckReportGroupDestroy(ctx), Steps: []resource.TestStep{ @@ -103,13 +106,13 @@ func TestAccCodeBuildReportGroup_Export_s3(t *testing.T) { func TestAccCodeBuildReportGroup_tags(t *testing.T) { ctx := acctest.Context(t) - var reportGroup codebuild.ReportGroup + var reportGroup types.ReportGroup rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resourceName := "aws_codebuild_report_group.test" resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheckReportGroup(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, codebuild.EndpointsID), + ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: 
acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckReportGroupDestroy(ctx), Steps: []resource.TestStep{ @@ -150,13 +153,13 @@ func TestAccCodeBuildReportGroup_tags(t *testing.T) { func TestAccCodeBuildReportGroup_deleteReports(t *testing.T) { ctx := acctest.Context(t) - var reportGroup codebuild.ReportGroup + var reportGroup types.ReportGroup rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resourceName := "aws_codebuild_report_group.test" resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheckReportGroup(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, codebuild.EndpointsID), + ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckReportGroupDestroy(ctx), Steps: []resource.TestStep{ @@ -179,13 +182,13 @@ func TestAccCodeBuildReportGroup_deleteReports(t *testing.T) { func TestAccCodeBuildReportGroup_disappears(t *testing.T) { ctx := acctest.Context(t) - var reportGroup codebuild.ReportGroup + var reportGroup types.ReportGroup rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resourceName := "aws_codebuild_report_group.test" resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheckReportGroup(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, codebuild.EndpointsID), + ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckReportGroupDestroy(ctx), Steps: []resource.TestStep{ @@ -202,11 +205,11 @@ func TestAccCodeBuildReportGroup_disappears(t *testing.T) { } func testAccPreCheckReportGroup(ctx context.Context, t *testing.T) { - conn := acctest.Provider.Meta().(*conns.AWSClient).CodeBuildConn(ctx) + conn := acctest.Provider.Meta().(*conns.AWSClient).CodeBuildClient(ctx) input := &codebuild.ListReportGroupsInput{} - _, err := conn.ListReportGroupsWithContext(ctx, input) + _, err := conn.ListReportGroups(ctx, input) if acctest.PreCheckSkipError(err) { t.Skipf("skipping acceptance testing: %s", err) @@ -219,45 +222,46 @@ func testAccPreCheckReportGroup(ctx context.Context, t *testing.T) { func testAccCheckReportGroupDestroy(ctx context.Context) resource.TestCheckFunc { return func(s *terraform.State) error { - conn := acctest.Provider.Meta().(*conns.AWSClient).CodeBuildConn(ctx) + conn := acctest.Provider.Meta().(*conns.AWSClient).CodeBuildClient(ctx) for _, rs := range s.RootModule().Resources { if rs.Type != "aws_codebuild_report_group" { continue } - resp, err := tfcodebuild.FindReportGroupByARN(ctx, conn, rs.Primary.ID) + _, err := tfcodebuild.FindReportGroupByARN(ctx, conn, rs.Primary.ID) + + if tfresource.NotFound(err) { + continue + } + if err != nil { return err } - if resp != nil { - return fmt.Errorf("Found Report Group %s", rs.Primary.ID) - } + return fmt.Errorf("CodeBuild Report Group (%s) still exists", rs.Primary.ID) } + return nil } } -func testAccCheckReportGroupExists(ctx context.Context, name string, reportGroup *codebuild.ReportGroup) resource.TestCheckFunc { +func testAccCheckReportGroupExists(ctx context.Context, n string, v *types.ReportGroup) resource.TestCheckFunc { return func(s *terraform.State) error { - rs, ok := s.RootModule().Resources[name] + rs, ok := s.RootModule().Resources[n] if !ok { - return fmt.Errorf("Not found: %s", name) + return fmt.Errorf("Not found: %s", n) } - conn := acctest.Provider.Meta().(*conns.AWSClient).CodeBuildConn(ctx) + conn := 
acctest.Provider.Meta().(*conns.AWSClient).CodeBuildClient(ctx) + + output, err := tfcodebuild.FindReportGroupByARN(ctx, conn, rs.Primary.ID) - resp, err := tfcodebuild.FindReportGroupByARN(ctx, conn, rs.Primary.ID) if err != nil { return err } - if resp == nil { - return fmt.Errorf("Report Group %s not found", rs.Primary.ID) - } - - *reportGroup = *resp + *v = *output return nil } diff --git a/internal/service/codebuild/resource_policy_test.go b/internal/service/codebuild/resource_policy_test.go index 32077ec9396..e7a713e14c2 100644 --- a/internal/service/codebuild/resource_policy_test.go +++ b/internal/service/codebuild/resource_policy_test.go @@ -8,7 +8,7 @@ import ( "fmt" "testing" - "github.com/aws/aws-sdk-go/service/codebuild" + "github.com/aws/aws-sdk-go-v2/service/codebuild" sdkacctest "github.com/hashicorp/terraform-plugin-testing/helper/acctest" "github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/hashicorp/terraform-plugin-testing/terraform" @@ -16,6 +16,7 @@ import ( "github.com/hashicorp/terraform-provider-aws/internal/conns" tfcodebuild "github.com/hashicorp/terraform-provider-aws/internal/service/codebuild" "github.com/hashicorp/terraform-provider-aws/internal/tfresource" + "github.com/hashicorp/terraform-provider-aws/names" ) func TestAccCodeBuildResourcePolicy_basic(t *testing.T) { @@ -26,7 +27,7 @@ func TestAccCodeBuildResourcePolicy_basic(t *testing.T) { resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, codebuild.EndpointsID), + ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckResourcePolicyDestroy(ctx), Steps: []resource.TestStep{ @@ -55,7 +56,7 @@ func TestAccCodeBuildResourcePolicy_disappears(t *testing.T) { resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, codebuild.EndpointsID), + ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckResourcePolicyDestroy(ctx), Steps: []resource.TestStep{ @@ -64,7 +65,6 @@ func TestAccCodeBuildResourcePolicy_disappears(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckResourcePolicyExists(ctx, resourceName, &reportGroup), acctest.CheckResourceDisappears(ctx, acctest.Provider, tfcodebuild.ResourceResourcePolicy(), resourceName), - acctest.CheckResourceDisappears(ctx, acctest.Provider, tfcodebuild.ResourceResourcePolicy(), resourceName), ), ExpectNonEmptyPlan: true, }, @@ -72,7 +72,7 @@ func TestAccCodeBuildResourcePolicy_disappears(t *testing.T) { }) } -func TestAccCodeBuildResourcePolicy_disappears_resource(t *testing.T) { +func TestAccCodeBuildResourcePolicy_Disappears_resource(t *testing.T) { ctx := acctest.Context(t) var reportGroup codebuild.GetResourcePolicyOutput rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) @@ -80,7 +80,7 @@ func TestAccCodeBuildResourcePolicy_disappears_resource(t *testing.T) { resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, codebuild.EndpointsID), + ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckResourcePolicyDestroy(ctx), Steps: []resource.TestStep{ @@ -89,7 +89,6 @@ func 
TestAccCodeBuildResourcePolicy_disappears_resource(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckResourcePolicyExists(ctx, resourceName, &reportGroup), acctest.CheckResourceDisappears(ctx, acctest.Provider, tfcodebuild.ResourceReportGroup(), resourceName), - acctest.CheckResourceDisappears(ctx, acctest.Provider, tfcodebuild.ResourceResourcePolicy(), resourceName), ), ExpectNonEmptyPlan: true, }, @@ -99,14 +98,15 @@ func TestAccCodeBuildResourcePolicy_disappears_resource(t *testing.T) { func testAccCheckResourcePolicyDestroy(ctx context.Context) resource.TestCheckFunc { return func(s *terraform.State) error { - conn := acctest.Provider.Meta().(*conns.AWSClient).CodeBuildConn(ctx) + conn := acctest.Provider.Meta().(*conns.AWSClient).CodeBuildClient(ctx) for _, rs := range s.RootModule().Resources { if rs.Type != "aws_codebuild_resource_policy" { continue } - resp, err := tfcodebuild.FindResourcePolicyByARN(ctx, conn, rs.Primary.ID) + _, err := tfcodebuild.FindResourcePolicyByARN(ctx, conn, rs.Primary.ID) + if tfresource.NotFound(err) { continue } @@ -115,33 +115,29 @@ func testAccCheckResourcePolicyDestroy(ctx context.Context) resource.TestCheckFu return err } - if resp != nil { - return fmt.Errorf("Found Resource Policy %s", rs.Primary.ID) - } + return fmt.Errorf("CodeBuild Resource Policy (%s) still exists", rs.Primary.ID) } + return nil } } -func testAccCheckResourcePolicyExists(ctx context.Context, name string, policy *codebuild.GetResourcePolicyOutput) resource.TestCheckFunc { +func testAccCheckResourcePolicyExists(ctx context.Context, n string, v *codebuild.GetResourcePolicyOutput) resource.TestCheckFunc { return func(s *terraform.State) error { - rs, ok := s.RootModule().Resources[name] + rs, ok := s.RootModule().Resources[n] if !ok { - return fmt.Errorf("Not found: %s", name) + return fmt.Errorf("Not found: %s", n) } - conn := acctest.Provider.Meta().(*conns.AWSClient).CodeBuildConn(ctx) + conn := acctest.Provider.Meta().(*conns.AWSClient).CodeBuildClient(ctx) + + output, err := tfcodebuild.FindResourcePolicyByARN(ctx, conn, rs.Primary.ID) - resp, err := tfcodebuild.FindResourcePolicyByARN(ctx, conn, rs.Primary.ID) if err != nil { return err } - if resp == nil { - return fmt.Errorf("Resource Policy %s not found", rs.Primary.ID) - } - - *policy = *resp + *v = *output return nil } diff --git a/internal/service/codebuild/source_credential_test.go b/internal/service/codebuild/source_credential_test.go index 51a617fb54b..61a0c5342fc 100644 --- a/internal/service/codebuild/source_credential_test.go +++ b/internal/service/codebuild/source_credential_test.go @@ -9,7 +9,7 @@ import ( "testing" "github.com/YakDriver/regexache" - "github.com/aws/aws-sdk-go/service/codebuild" + "github.com/aws/aws-sdk-go-v2/service/codebuild/types" sdkacctest "github.com/hashicorp/terraform-plugin-testing/helper/acctest" "github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/hashicorp/terraform-plugin-testing/terraform" @@ -17,17 +17,18 @@ import ( "github.com/hashicorp/terraform-provider-aws/internal/conns" tfcodebuild "github.com/hashicorp/terraform-provider-aws/internal/service/codebuild" "github.com/hashicorp/terraform-provider-aws/internal/tfresource" + "github.com/hashicorp/terraform-provider-aws/names" ) func TestAccCodeBuildSourceCredential_basic(t *testing.T) { ctx := acctest.Context(t) - var sourceCredentialsInfo codebuild.SourceCredentialsInfo + var sourceCredentialsInfo types.SourceCredentialsInfo token := 
sdkacctest.RandomWithPrefix("token") resourceName := "aws_codebuild_source_credential.test" resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, codebuild.EndpointsID), + ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckSourceCredentialDestroy(ctx), Steps: []resource.TestStep{ @@ -61,13 +62,13 @@ func TestAccCodeBuildSourceCredential_basic(t *testing.T) { func TestAccCodeBuildSourceCredential_basicAuth(t *testing.T) { ctx := acctest.Context(t) - var sourceCredentialsInfo codebuild.SourceCredentialsInfo + var sourceCredentialsInfo types.SourceCredentialsInfo token := sdkacctest.RandomWithPrefix("token") resourceName := "aws_codebuild_source_credential.test" resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, codebuild.EndpointsID), + ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckSourceCredentialDestroy(ctx), Steps: []resource.TestStep{ @@ -100,13 +101,13 @@ func TestAccCodeBuildSourceCredential_basicAuth(t *testing.T) { func TestAccCodeBuildSourceCredential_disappears(t *testing.T) { ctx := acctest.Context(t) - var sourceCredentialsInfo codebuild.SourceCredentialsInfo + var sourceCredentialsInfo types.SourceCredentialsInfo token := sdkacctest.RandomWithPrefix("token") resourceName := "aws_codebuild_source_credential.test" resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, codebuild.EndpointsID), + ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckSourceCredentialDestroy(ctx), Steps: []resource.TestStep{ @@ -115,7 +116,6 @@ func TestAccCodeBuildSourceCredential_disappears(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckSourceCredentialExists(ctx, resourceName, &sourceCredentialsInfo), acctest.CheckResourceDisappears(ctx, acctest.Provider, tfcodebuild.ResourceSourceCredential(), resourceName), - acctest.CheckResourceDisappears(ctx, acctest.Provider, tfcodebuild.ResourceSourceCredential(), resourceName), ), ExpectNonEmptyPlan: true, }, @@ -125,14 +125,14 @@ func TestAccCodeBuildSourceCredential_disappears(t *testing.T) { func testAccCheckSourceCredentialDestroy(ctx context.Context) resource.TestCheckFunc { return func(s *terraform.State) error { - conn := acctest.Provider.Meta().(*conns.AWSClient).CodeBuildConn(ctx) + conn := acctest.Provider.Meta().(*conns.AWSClient).CodeBuildClient(ctx) for _, rs := range s.RootModule().Resources { if rs.Type != "aws_codebuild_source_credential" { continue } - _, err := tfcodebuild.FindSourceCredentialByARN(ctx, conn, rs.Primary.ID) + _, err := tfcodebuild.FindSourceCredentialsByARN(ctx, conn, rs.Primary.ID) if tfresource.NotFound(err) { continue @@ -144,29 +144,27 @@ func testAccCheckSourceCredentialDestroy(ctx context.Context) resource.TestCheck return fmt.Errorf("CodeBuild Source Credential %s still exists", rs.Primary.ID) } + return nil } } -func testAccCheckSourceCredentialExists(ctx context.Context, name string, sourceCredential *codebuild.SourceCredentialsInfo) resource.TestCheckFunc { +func testAccCheckSourceCredentialExists(ctx context.Context, 
n string, v *types.SourceCredentialsInfo) resource.TestCheckFunc { return func(s *terraform.State) error { - rs, ok := s.RootModule().Resources[name] + rs, ok := s.RootModule().Resources[n] if !ok { - return fmt.Errorf("Not found: %s", name) + return fmt.Errorf("Not found: %s", n) } - conn := acctest.Provider.Meta().(*conns.AWSClient).CodeBuildConn(ctx) + conn := acctest.Provider.Meta().(*conns.AWSClient).CodeBuildClient(ctx) + + output, err := tfcodebuild.FindSourceCredentialsByARN(ctx, conn, rs.Primary.ID) - output, err := tfcodebuild.FindSourceCredentialByARN(ctx, conn, rs.Primary.ID) if err != nil { return err } - if output == nil { - return fmt.Errorf("CodeBuild Source Credential (%s) not found", rs.Primary.ID) - } - - *sourceCredential = *output + *v = *output return nil } diff --git a/internal/service/codebuild/webhook_test.go b/internal/service/codebuild/webhook_test.go index 6d3046d03ea..170f2bf86be 100644 --- a/internal/service/codebuild/webhook_test.go +++ b/internal/service/codebuild/webhook_test.go @@ -10,18 +10,21 @@ import ( "testing" "github.com/YakDriver/regexache" - "github.com/aws/aws-sdk-go/aws" - "github.com/aws/aws-sdk-go/service/codebuild" + "github.com/aws/aws-sdk-go-v2/aws" + "github.com/aws/aws-sdk-go-v2/service/codebuild/types" sdkacctest "github.com/hashicorp/terraform-plugin-testing/helper/acctest" "github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/hashicorp/terraform-plugin-testing/terraform" "github.com/hashicorp/terraform-provider-aws/internal/acctest" "github.com/hashicorp/terraform-provider-aws/internal/conns" + tfcodebuild "github.com/hashicorp/terraform-provider-aws/internal/service/codebuild" + "github.com/hashicorp/terraform-provider-aws/internal/tfresource" + "github.com/hashicorp/terraform-provider-aws/names" ) func TestAccCodeBuildWebhook_bitbucket(t *testing.T) { ctx := acctest.Context(t) - var webhook codebuild.Webhook + var webhook types.Webhook rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resourceName := "aws_codebuild_webhook.test" @@ -29,7 +32,7 @@ func TestAccCodeBuildWebhook_bitbucket(t *testing.T) { resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, codebuild.EndpointsID), + ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckWebhookDestroy(ctx), Steps: []resource.TestStep{ @@ -56,13 +59,13 @@ func TestAccCodeBuildWebhook_bitbucket(t *testing.T) { func TestAccCodeBuildWebhook_gitHub(t *testing.T) { ctx := acctest.Context(t) - var webhook codebuild.Webhook + var webhook types.Webhook rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resourceName := "aws_codebuild_webhook.test" resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, codebuild.EndpointsID), + ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckWebhookDestroy(ctx), Steps: []resource.TestStep{ @@ -89,13 +92,13 @@ func TestAccCodeBuildWebhook_gitHub(t *testing.T) { func TestAccCodeBuildWebhook_gitHubEnterprise(t *testing.T) { ctx := acctest.Context(t) - var webhook codebuild.Webhook + var webhook types.Webhook rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resourceName := 
"aws_codebuild_webhook.test" resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, codebuild.EndpointsID), + ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckWebhookDestroy(ctx), Steps: []resource.TestStep{ @@ -139,13 +142,13 @@ func TestAccCodeBuildWebhook_gitHubEnterprise(t *testing.T) { func TestAccCodeBuildWebhook_buildType(t *testing.T) { ctx := acctest.Context(t) - var webhook codebuild.Webhook + var webhook types.Webhook rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resourceName := "aws_codebuild_webhook.test" resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, codebuild.EndpointsID), + ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckWebhookDestroy(ctx), Steps: []resource.TestStep{ @@ -182,13 +185,13 @@ func TestAccCodeBuildWebhook_buildType(t *testing.T) { func TestAccCodeBuildWebhook_branchFilter(t *testing.T) { ctx := acctest.Context(t) - var webhook codebuild.Webhook + var webhook types.Webhook rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resourceName := "aws_codebuild_webhook.test" resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, codebuild.EndpointsID), + ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckWebhookDestroy(ctx), Steps: []resource.TestStep{ @@ -218,13 +221,13 @@ func TestAccCodeBuildWebhook_branchFilter(t *testing.T) { func TestAccCodeBuildWebhook_filterGroup(t *testing.T) { ctx := acctest.Context(t) - var webhook codebuild.Webhook + var webhook types.Webhook rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resourceName := "aws_codebuild_webhook.test" resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, codebuild.EndpointsID), + ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckWebhookDestroy(ctx), Steps: []resource.TestStep{ @@ -232,22 +235,22 @@ func TestAccCodeBuildWebhook_filterGroup(t *testing.T) { Config: testAccWebhookConfig_filterGroup(rName), Check: resource.ComposeTestCheckFunc( testAccCheckWebhookExists(ctx, resourceName, &webhook), - testAccCheckWebhookFilter(&webhook, [][]*codebuild.WebhookFilter{ + testAccCheckWebhookFilter(&webhook, [][]*types.WebhookFilter{ { { - Type: aws.String("EVENT"), + Type: types.WebhookFilterTypeEvent, Pattern: aws.String("PUSH"), ExcludeMatchedPattern: aws.Bool(false), }, { - Type: aws.String("HEAD_REF"), + Type: types.WebhookFilterTypeHeadRef, Pattern: aws.String("refs/heads/master"), ExcludeMatchedPattern: aws.Bool(true), }, }, { { - Type: aws.String("EVENT"), + Type: types.WebhookFilterTypeEvent, Pattern: aws.String("PULL_REQUEST_UPDATED"), ExcludeMatchedPattern: aws.Bool(false), }, @@ -265,7 +268,56 @@ func TestAccCodeBuildWebhook_filterGroup(t *testing.T) { }) } -func testAccCheckWebhookFilter(webhook *codebuild.Webhook, expectedFilters [][]*codebuild.WebhookFilter) resource.TestCheckFunc { +func 
TestAccCodeBuildWebhook_disappears(t *testing.T) { + ctx := acctest.Context(t) + var webhook types.Webhook + rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) + resourceName := "aws_codebuild_webhook.test" + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, + ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, + CheckDestroy: testAccCheckWebhookDestroy(ctx), + Steps: []resource.TestStep{ + { + Config: testAccWebhookConfig_gitHub(rName), + Check: resource.ComposeTestCheckFunc( + testAccCheckWebhookExists(ctx, resourceName, &webhook), + acctest.CheckResourceDisappears(ctx, acctest.Provider, tfcodebuild.ResourceWebhook(), resourceName), + ), + ExpectNonEmptyPlan: true, + }, + }, + }) +} + +func TestAccCodeBuildWebhook_Disappears_project(t *testing.T) { + ctx := acctest.Context(t) + var webhook types.Webhook + rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) + resourceName := "aws_codebuild_webhook.test" + projectResourceName := "aws_codebuild_project.test" + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, + ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, + CheckDestroy: testAccCheckWebhookDestroy(ctx), + Steps: []resource.TestStep{ + { + Config: testAccWebhookConfig_gitHub(rName), + Check: resource.ComposeTestCheckFunc( + testAccCheckWebhookExists(ctx, resourceName, &webhook), + acctest.CheckResourceDisappears(ctx, acctest.Provider, tfcodebuild.ResourceProject(), projectResourceName), + ), + ExpectNonEmptyPlan: true, + }, + }, + }) +} + +func testAccCheckWebhookFilter(webhook *types.Webhook, expectedFilters [][]*types.WebhookFilter) resource.TestCheckFunc { return func(s *terraform.State) error { if webhook == nil { return fmt.Errorf("webhook missing") @@ -281,65 +333,46 @@ func testAccCheckWebhookFilter(webhook *codebuild.Webhook, expectedFilters [][]* func testAccCheckWebhookDestroy(ctx context.Context) resource.TestCheckFunc { return func(s *terraform.State) error { - conn := acctest.Provider.Meta().(*conns.AWSClient).CodeBuildConn(ctx) + conn := acctest.Provider.Meta().(*conns.AWSClient).CodeBuildClient(ctx) for _, rs := range s.RootModule().Resources { if rs.Type != "aws_codebuild_webhook" { continue } - resp, err := conn.BatchGetProjectsWithContext(ctx, &codebuild.BatchGetProjectsInput{ - Names: []*string{ - aws.String(rs.Primary.ID), - }, - }) + _, err := tfcodebuild.FindWebhookByProjectName(ctx, conn, rs.Primary.ID) - if err != nil { - return err + if tfresource.NotFound(err) { + continue } - if len(resp.Projects) == 0 { - return nil + if err != nil { + return err } - project := resp.Projects[0] - if project.Webhook != nil && project.Webhook.Url != nil { - return fmt.Errorf("Found CodeBuild Project %q Webhook: %s", rs.Primary.ID, project.Webhook) - } + return fmt.Errorf("CodeBuild Webhook %s still exists", rs.Primary.ID) } + return nil } } -func testAccCheckWebhookExists(ctx context.Context, name string, webhook *codebuild.Webhook) resource.TestCheckFunc { +func testAccCheckWebhookExists(ctx context.Context, n string, v *types.Webhook) resource.TestCheckFunc { return func(s *terraform.State) error { - rs, ok := s.RootModule().Resources[name] + rs, ok := s.RootModule().Resources[n] if !ok { - return fmt.Errorf("Not found: %s", name) + return fmt.Errorf("Not found: %s", n) } - 
conn := acctest.Provider.Meta().(*conns.AWSClient).CodeBuildConn(ctx) + conn := acctest.Provider.Meta().(*conns.AWSClient).CodeBuildClient(ctx) - resp, err := conn.BatchGetProjectsWithContext(ctx, &codebuild.BatchGetProjectsInput{ - Names: []*string{ - aws.String(rs.Primary.ID), - }, - }) + output, err := tfcodebuild.FindWebhookByProjectName(ctx, conn, rs.Primary.ID) if err != nil { return err } - if len(resp.Projects) == 0 { - return fmt.Errorf("CodeBuild Project %q not found", rs.Primary.ID) - } - - project := resp.Projects[0] - if project.Webhook == nil || aws.StringValue(project.Webhook.PayloadUrl) == "" { - return fmt.Errorf("CodeBuild Project %q Webhook not found", rs.Primary.ID) - } - - *webhook = *project.Webhook + *v = *output return nil } From c137ba30d8fe6a0280b310380b265456b0c7a8a7 Mon Sep 17 00:00:00 2001 From: Kit Ewbank Date: Mon, 22 Jan 2024 17:58:30 -0500 Subject: [PATCH 14/30] Fix golangci-lint 'whitespace'. --- internal/service/codebuild/project.go | 1 - internal/service/codebuild/project_test.go | 1 - 2 files changed, 2 deletions(-) diff --git a/internal/service/codebuild/project.go b/internal/service/codebuild/project.go index 577e186a021..d3ae32cf2dc 100644 --- a/internal/service/codebuild/project.go +++ b/internal/service/codebuild/project.go @@ -1873,7 +1873,6 @@ func flattenVPCConfig(apiObject *types.VpcConfig) []interface{} { tfMap["security_group_ids"] = apiObject.SecurityGroupIds return []interface{}{tfMap} - } func flattenBuildBatchConfig(apiObject *types.ProjectBuildBatchConfig) []interface{} { diff --git a/internal/service/codebuild/project_test.go b/internal/service/codebuild/project_test.go index 16303db6263..71d49af1a59 100644 --- a/internal/service/codebuild/project_test.go +++ b/internal/service/codebuild/project_test.go @@ -3488,7 +3488,6 @@ resource "aws_codebuild_project" "test" { } func testAccProjectConfig_buildBatchConfigDelete(rName string, withBuildBatchConfig bool) string { - template := ` resource "aws_codebuild_project" "test" { name = %[1]q From 84bbd5cd3ba2fffb82e6895d91f7302a00518ae4 Mon Sep 17 00:00:00 2001 From: Kit Ewbank Date: Mon, 22 Jan 2024 18:02:00 -0500 Subject: [PATCH 15/30] Fix golangci-lint 'asasalint'. --- internal/service/codebuild/webhook.go | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/internal/service/codebuild/webhook.go b/internal/service/codebuild/webhook.go index fc30667fcd7..5cb5f278b0d 100644 --- a/internal/service/codebuild/webhook.go +++ b/internal/service/codebuild/webhook.go @@ -297,7 +297,10 @@ func flattenWebhookFilterGroups(apiObjects [][]types.WebhookFilter) []interface{ var tfList []interface{} for _, apiObject := range apiObjects { - tfList = append(tfList, flattenWebhookFilters(apiObject)) + tfMap := map[string]interface{}{ + "filter": flattenWebhookFilters(apiObject), + } + tfList = append(tfList, tfMap) } return tfList From 7a1121356ba9ca66eaf2c1963a2b0547fac125c6 Mon Sep 17 00:00:00 2001 From: Kit Ewbank Date: Tue, 23 Jan 2024 08:03:43 -0500 Subject: [PATCH 16/30] r/aws_codebuild_resource_policy: Fix 'ResourceNotFoundException: Resource ARN does not exist' on Delete. 
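Context for this fix (an illustrative sketch, not part of the patch itself): the commit switches the Delete path from matching the typed ResourceNotFoundException error to comparing error codes via tfawserr, so that deleting a resource policy whose resource ARN has already disappeared is treated as success. Assuming only the names that appear in the diff below (errCodeResourceNotFoundException from the new errors.go, plus the SDK v2 and tfawserr calls), the intended behaviour is roughly:

package codebuild

import (
	"context"

	"github.com/aws/aws-sdk-go-v2/aws"
	"github.com/aws/aws-sdk-go-v2/service/codebuild"
	"github.com/hashicorp/aws-sdk-go-base/v2/tfawserr"
)

// deleteResourcePolicy is a hypothetical standalone helper; in the provider the
// equivalent logic lives inside resourceResourcePolicyDelete. errCodeResourceNotFoundException
// is declared in the errors.go file added by this commit (same package).
func deleteResourcePolicy(ctx context.Context, conn *codebuild.Client, arn string) error {
	_, err := conn.DeleteResourcePolicy(ctx, &codebuild.DeleteResourcePolicyInput{
		ResourceArn: aws.String(arn),
	})

	// An already-deleted policy reports ResourceNotFoundException; treat it as a successful delete.
	if tfawserr.ErrCodeEquals(err, errCodeResourceNotFoundException) {
		return nil
	}

	return err
}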
--- internal/service/codebuild/errors.go | 12 ++++++++++++ internal/service/codebuild/resource_policy.go | 7 +++---- 2 files changed, 15 insertions(+), 4 deletions(-) create mode 100644 internal/service/codebuild/errors.go diff --git a/internal/service/codebuild/errors.go b/internal/service/codebuild/errors.go new file mode 100644 index 00000000000..0a5654bad57 --- /dev/null +++ b/internal/service/codebuild/errors.go @@ -0,0 +1,12 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +package codebuild + +import ( + "github.com/aws/aws-sdk-go-v2/service/codebuild/types" +) + +var ( + errCodeResourceNotFoundException = (*types.ResourceNotFoundException)(nil).ErrorCode() +) diff --git a/internal/service/codebuild/resource_policy.go b/internal/service/codebuild/resource_policy.go index 9c6d021ab11..f850fa3f573 100644 --- a/internal/service/codebuild/resource_policy.go +++ b/internal/service/codebuild/resource_policy.go @@ -9,14 +9,13 @@ import ( "github.com/aws/aws-sdk-go-v2/aws" "github.com/aws/aws-sdk-go-v2/service/codebuild" - "github.com/aws/aws-sdk-go-v2/service/codebuild/types" + "github.com/hashicorp/aws-sdk-go-base/v2/tfawserr" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/retry" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/structure" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation" "github.com/hashicorp/terraform-provider-aws/internal/conns" - "github.com/hashicorp/terraform-provider-aws/internal/errs" "github.com/hashicorp/terraform-provider-aws/internal/errs/sdkdiag" "github.com/hashicorp/terraform-provider-aws/internal/tfresource" "github.com/hashicorp/terraform-provider-aws/internal/verify" @@ -124,7 +123,7 @@ func resourceResourcePolicyDelete(ctx context.Context, d *schema.ResourceData, m ResourceArn: aws.String(d.Id()), }) - if errs.IsA[*types.ResourceNotFoundException](err) { + if tfawserr.ErrCodeEquals(err, errCodeResourceNotFoundException) { return diags } @@ -142,7 +141,7 @@ func findResourcePolicyByARN(ctx context.Context, conn *codebuild.Client, arn st output, err := conn.GetResourcePolicy(ctx, input) - if errs.IsA[*types.ResourceNotFoundException](err) { + if tfawserr.ErrCodeEquals(err, errCodeResourceNotFoundException) { return nil, &retry.NotFoundError{ LastError: err, LastRequest: input, From 9701067ff43a56123e037e4e871d84359699bc88 Mon Sep 17 00:00:00 2001 From: Kit Ewbank Date: Tue, 23 Jan 2024 08:06:07 -0500 Subject: [PATCH 17/30] Add CHANGELOG entry. --- .changelog/34121.txt | 3 +++ 1 file changed, 3 insertions(+) create mode 100644 .changelog/34121.txt diff --git a/.changelog/34121.txt b/.changelog/34121.txt new file mode 100644 index 00000000000..ce408dbccdc --- /dev/null +++ b/.changelog/34121.txt @@ -0,0 +1,3 @@ +```release-note:bug +resource/aws_codebuild_project: Allow `build_batch_config` to be removed on Update +``` \ No newline at end of file From e8c28d46e7aa90ba92737611ad92a8d78a79f4ab Mon Sep 17 00:00:00 2001 From: Kit Ewbank Date: Tue, 23 Jan 2024 08:40:16 -0500 Subject: [PATCH 18/30] codebuild: Add 'testAccPreCheckSourceCredentialsForServerType'. 
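The precheck added below calls a findSourceCredentials helper (exported for tests via exports_test.go) whose implementation is not shown in this diff. A minimal sketch of what such a list-and-filter helper could look like, assuming the AWS SDK for Go v2 ListSourceCredentials API; the provider's actual implementation may differ:

package codebuild

import (
	"context"

	"github.com/aws/aws-sdk-go-v2/service/codebuild"
	"github.com/aws/aws-sdk-go-v2/service/codebuild/types"
)

// findSourceCredentials lists the account's stored source credentials and keeps those
// accepted by the caller-supplied predicate. testAccPreCheckSourceCredentialsForServerType
// uses it to skip acceptance tests when no credentials exist for the required server type,
// rather than letting webhook tests fail outright.
func findSourceCredentials(ctx context.Context, conn *codebuild.Client, input *codebuild.ListSourceCredentialsInput, filter func(*types.SourceCredentialsInfo) bool) ([]types.SourceCredentialsInfo, error) {
	output, err := conn.ListSourceCredentials(ctx, input)
	if err != nil {
		return nil, err
	}

	var creds []types.SourceCredentialsInfo
	for i := range output.SourceCredentialsInfos {
		if filter(&output.SourceCredentialsInfos[i]) {
			creds = append(creds, output.SourceCredentialsInfos[i])
		}
	}

	return creds, nil
}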
--- internal/service/codebuild/exports_test.go | 1 + .../codebuild/source_credential_test.go | 22 +++++++++ internal/service/codebuild/webhook_test.go | 48 +++++++++++++++---- 3 files changed, 63 insertions(+), 8 deletions(-) diff --git a/internal/service/codebuild/exports_test.go b/internal/service/codebuild/exports_test.go index 64454498530..2b585b4c3c9 100644 --- a/internal/service/codebuild/exports_test.go +++ b/internal/service/codebuild/exports_test.go @@ -15,5 +15,6 @@ var ( FindReportGroupByARN = findReportGroupByARN FindResourcePolicyByARN = findResourcePolicyByARN FindSourceCredentialsByARN = findSourceCredentialsByARN + FindSourceCredentials = findSourceCredentials FindWebhookByProjectName = findWebhookByProjectName ) diff --git a/internal/service/codebuild/source_credential_test.go b/internal/service/codebuild/source_credential_test.go index 61a0c5342fc..1b642e2ae0c 100644 --- a/internal/service/codebuild/source_credential_test.go +++ b/internal/service/codebuild/source_credential_test.go @@ -9,6 +9,7 @@ import ( "testing" "github.com/YakDriver/regexache" + "github.com/aws/aws-sdk-go-v2/service/codebuild" "github.com/aws/aws-sdk-go-v2/service/codebuild/types" sdkacctest "github.com/hashicorp/terraform-plugin-testing/helper/acctest" "github.com/hashicorp/terraform-plugin-testing/helper/resource" @@ -123,6 +124,27 @@ func TestAccCodeBuildSourceCredential_disappears(t *testing.T) { }) } +func testAccPreCheckSourceCredentialsForServerType(ctx context.Context, t *testing.T, serverType types.ServerType) { + conn := acctest.Provider.Meta().(*conns.AWSClient).CodeBuildClient(ctx) + + input := &codebuild.ListSourceCredentialsInput{} + output, err := tfcodebuild.FindSourceCredentials(ctx, conn, input, func(v *types.SourceCredentialsInfo) bool { + return v.ServerType == serverType + }) + + if acctest.PreCheckSkipError(err) { + t.Skipf("skipping acceptance testing: %s", err) + } + + if err != nil { + t.Fatalf("unexpected PreCheck error: %s", err) + } + + if len(output) == 0 { + t.Skipf("skipping acceptance testing: Source Credentials (%s) not found", serverType) + } +} + func testAccCheckSourceCredentialDestroy(ctx context.Context) resource.TestCheckFunc { return func(s *terraform.State) error { conn := acctest.Provider.Meta().(*conns.AWSClient).CodeBuildClient(ctx) diff --git a/internal/service/codebuild/webhook_test.go b/internal/service/codebuild/webhook_test.go index 170f2bf86be..37dae624a1c 100644 --- a/internal/service/codebuild/webhook_test.go +++ b/internal/service/codebuild/webhook_test.go @@ -31,7 +31,11 @@ func TestAccCodeBuildWebhook_bitbucket(t *testing.T) { sourceLocation := testAccBitbucketSourceLocationFromEnv() resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, + PreCheck: func() { + acctest.PreCheck(ctx, t) + testAccPreCheck(ctx, t) + testAccPreCheckSourceCredentialsForServerType(ctx, t, types.ServerTypeBitbucket) + }, ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckWebhookDestroy(ctx), @@ -64,7 +68,11 @@ func TestAccCodeBuildWebhook_gitHub(t *testing.T) { resourceName := "aws_codebuild_webhook.test" resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, + PreCheck: func() { + acctest.PreCheck(ctx, t) + testAccPreCheck(ctx, t) + testAccPreCheckSourceCredentialsForServerType(ctx, t, types.ServerTypeGithub) + }, ErrorCheck: 
acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckWebhookDestroy(ctx), @@ -97,7 +105,11 @@ func TestAccCodeBuildWebhook_gitHubEnterprise(t *testing.T) { resourceName := "aws_codebuild_webhook.test" resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, + PreCheck: func() { + acctest.PreCheck(ctx, t) + testAccPreCheck(ctx, t) + testAccPreCheckSourceCredentialsForServerType(ctx, t, types.ServerTypeGithubEnterprise) + }, ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckWebhookDestroy(ctx), @@ -147,7 +159,11 @@ func TestAccCodeBuildWebhook_buildType(t *testing.T) { resourceName := "aws_codebuild_webhook.test" resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, + PreCheck: func() { + acctest.PreCheck(ctx, t) + testAccPreCheck(ctx, t) + testAccPreCheckSourceCredentialsForServerType(ctx, t, types.ServerTypeGithub) + }, ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckWebhookDestroy(ctx), @@ -190,7 +206,11 @@ func TestAccCodeBuildWebhook_branchFilter(t *testing.T) { resourceName := "aws_codebuild_webhook.test" resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, + PreCheck: func() { + acctest.PreCheck(ctx, t) + testAccPreCheck(ctx, t) + testAccPreCheckSourceCredentialsForServerType(ctx, t, types.ServerTypeGithub) + }, ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckWebhookDestroy(ctx), @@ -226,7 +246,11 @@ func TestAccCodeBuildWebhook_filterGroup(t *testing.T) { resourceName := "aws_codebuild_webhook.test" resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, + PreCheck: func() { + acctest.PreCheck(ctx, t) + testAccPreCheck(ctx, t) + testAccPreCheckSourceCredentialsForServerType(ctx, t, types.ServerTypeGithub) + }, ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckWebhookDestroy(ctx), @@ -275,7 +299,11 @@ func TestAccCodeBuildWebhook_disappears(t *testing.T) { resourceName := "aws_codebuild_webhook.test" resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, + PreCheck: func() { + acctest.PreCheck(ctx, t) + testAccPreCheck(ctx, t) + testAccPreCheckSourceCredentialsForServerType(ctx, t, types.ServerTypeGithub) + }, ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckWebhookDestroy(ctx), @@ -300,7 +328,11 @@ func TestAccCodeBuildWebhook_Disappears_project(t *testing.T) { projectResourceName := "aws_codebuild_project.test" resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, + PreCheck: func() { + acctest.PreCheck(ctx, t) + testAccPreCheck(ctx, t) + testAccPreCheckSourceCredentialsForServerType(ctx, t, types.ServerTypeGithub) + }, ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, 
CheckDestroy: testAccCheckWebhookDestroy(ctx), From 23d3f77ea2ce6d297ec0e7bf23280122db91e848 Mon Sep 17 00:00:00 2001 From: Kit Ewbank Date: Tue, 23 Jan 2024 09:20:34 -0500 Subject: [PATCH 19/30] r/aws_codebuild_webhook: Fix typo. --- internal/service/codebuild/webhook.go | 2 +- internal/service/codebuild/webhook_test.go | 16 +++++++--------- 2 files changed, 8 insertions(+), 10 deletions(-) diff --git a/internal/service/codebuild/webhook.go b/internal/service/codebuild/webhook.go index 5cb5f278b0d..53f0686bcd6 100644 --- a/internal/service/codebuild/webhook.go +++ b/internal/service/codebuild/webhook.go @@ -282,7 +282,7 @@ func expandWebhookFilter(tfMap map[string]interface{}) *types.WebhookFilter { apiObject.Pattern = aws.String(v) } - if v, ok := tfMap["types"].(string); ok && v != "" { + if v, ok := tfMap["type"].(string); ok && v != "" { apiObject.Type = types.WebhookFilterType(v) } diff --git a/internal/service/codebuild/webhook_test.go b/internal/service/codebuild/webhook_test.go index 37dae624a1c..75abcac7d23 100644 --- a/internal/service/codebuild/webhook_test.go +++ b/internal/service/codebuild/webhook_test.go @@ -6,12 +6,13 @@ package codebuild_test import ( "context" "fmt" - "reflect" "testing" "github.com/YakDriver/regexache" "github.com/aws/aws-sdk-go-v2/aws" "github.com/aws/aws-sdk-go-v2/service/codebuild/types" + "github.com/google/go-cmp/cmp" + "github.com/google/go-cmp/cmp/cmpopts" sdkacctest "github.com/hashicorp/terraform-plugin-testing/helper/acctest" "github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/hashicorp/terraform-plugin-testing/terraform" @@ -259,7 +260,7 @@ func TestAccCodeBuildWebhook_filterGroup(t *testing.T) { Config: testAccWebhookConfig_filterGroup(rName), Check: resource.ComposeTestCheckFunc( testAccCheckWebhookExists(ctx, resourceName, &webhook), - testAccCheckWebhookFilter(&webhook, [][]*types.WebhookFilter{ + testAccCheckWebhookFilter(&webhook, [][]types.WebhookFilter{ { { Type: types.WebhookFilterTypeEvent, @@ -349,14 +350,11 @@ func TestAccCodeBuildWebhook_Disappears_project(t *testing.T) { }) } -func testAccCheckWebhookFilter(webhook *types.Webhook, expectedFilters [][]*types.WebhookFilter) resource.TestCheckFunc { +func testAccCheckWebhookFilter(webhook *types.Webhook, expectedFilters [][]types.WebhookFilter) resource.TestCheckFunc { return func(s *terraform.State) error { - if webhook == nil { - return fmt.Errorf("webhook missing") - } - - if !reflect.DeepEqual(webhook.FilterGroups, expectedFilters) { - return fmt.Errorf("expected webhook filter configuration (%v), got: %v", expectedFilters, webhook.FilterGroups) + got, want := webhook.FilterGroups, expectedFilters + if diff := cmp.Diff(got, want, cmpopts.IgnoreUnexported(types.WebhookFilter{})); diff != "" { + return fmt.Errorf("unexpected WebhookFilter diff (+wanted, -got): %s", diff) } return nil From ccc569c8c0c89cf26187f3824c8ebc5e7b6c38e4 Mon Sep 17 00:00:00 2001 From: Kit Ewbank Date: Tue, 23 Jan 2024 11:00:41 -0500 Subject: [PATCH 20/30] r/aws_codebuild_project: Use 'testAccPreCheckSourceCredentialsForServerType' in acceptance tests. 
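
With the shared precheck in place, the project acceptance tests replace the inline one-line PreCheck with a multi-line variant that also verifies a source credential for the server type the test exercises (GitHub by default; GitHub Enterprise and Bitbucket tests pass the matching types.ServerType). Illustrative before/after, taken from the pattern repeated throughout the diff below:

    // Before
    PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) },

    // After (GITHUB-sourced tests; others substitute types.ServerTypeGithubEnterprise
    // or types.ServerTypeBitbucket as appropriate)
    PreCheck: func() {
        acctest.PreCheck(ctx, t)
        testAccPreCheck(ctx, t)
        testAccPreCheckSourceCredentialsForServerType(ctx, t, types.ServerTypeGithub)
    },

The diff also reorders several tests for readability (TestProject_nameValidation, _disappears, _tags, and _Environment_registryCredential move earlier in the file) and folds the GovCloud skip for the build status config test into its PreCheck via acctest.PreCheckPartitionNot.
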
--- internal/service/codebuild/project_test.go | 1208 ++++++++++++-------- 1 file changed, 713 insertions(+), 495 deletions(-) diff --git a/internal/service/codebuild/project_test.go b/internal/service/codebuild/project_test.go index 71d49af1a59..fd6330be256 100644 --- a/internal/service/codebuild/project_test.go +++ b/internal/service/codebuild/project_test.go @@ -56,6 +56,29 @@ func testAccGitHubSourceLocationFromEnv() string { return sourceLocation } +func TestProject_nameValidation(t *testing.T) { + t.Parallel() + + cases := []struct { + Value string + ErrCount int + }{ + {Value: "_test", ErrCount: 1}, + {Value: "test", ErrCount: 0}, + {Value: "1_test", ErrCount: 0}, + {Value: "test**1", ErrCount: 1}, + {Value: sdkacctest.RandString(256), ErrCount: 1}, + } + + for _, tc := range cases { + _, errors := tfcodebuild.ValidProjectName(tc.Value, "aws_codebuild_project") + + if len(errors) != tc.ErrCount { + t.Fatalf("Expected the AWS CodeBuild project name to trigger a validation error - %s", errors) + } + } +} + func TestAccCodeBuildProject_basic(t *testing.T) { ctx := acctest.Context(t) var project types.Project @@ -65,7 +88,11 @@ func TestAccCodeBuildProject_basic(t *testing.T) { roleResourceName := "aws_iam_role.test" resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, + PreCheck: func() { + acctest.PreCheck(ctx, t) + testAccPreCheck(ctx, t) + testAccPreCheckSourceCredentialsForServerType(ctx, t, types.ServerTypeGithub) + }, ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), @@ -117,6 +144,79 @@ func TestAccCodeBuildProject_basic(t *testing.T) { }) } +func TestAccCodeBuildProject_disappears(t *testing.T) { + ctx := acctest.Context(t) + var project types.Project + rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) + + resourceName := "aws_codebuild_project.test" + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { + acctest.PreCheck(ctx, t) + testAccPreCheck(ctx, t) + testAccPreCheckSourceCredentialsForServerType(ctx, t, types.ServerTypeGithub) + }, + ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, + CheckDestroy: testAccCheckProjectDestroy(ctx), + Steps: []resource.TestStep{ + { + Config: testAccProjectConfig_basic(rName), + Check: resource.ComposeTestCheckFunc( + testAccCheckProjectExists(ctx, resourceName, &project), + acctest.CheckResourceDisappears(ctx, acctest.Provider, tfcodebuild.ResourceProject(), resourceName), + ), + ExpectNonEmptyPlan: true, + }, + }, + }) +} + +// TODO +func TestAccCodeBuildProject_tags(t *testing.T) { + ctx := acctest.Context(t) + var project types.Project + rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) + resourceName := "aws_codebuild_project.test" + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { + acctest.PreCheck(ctx, t) + testAccPreCheck(ctx, t) + testAccPreCheckSourceCredentialsForServerType(ctx, t, types.ServerTypeGithub) + }, + ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, + CheckDestroy: testAccCheckProjectDestroy(ctx), + Steps: []resource.TestStep{ + { + Config: testAccProjectConfig_tags(rName, "tag2", "tag2value"), + Check: resource.ComposeTestCheckFunc( + testAccCheckProjectExists(ctx, resourceName, &project), + resource.TestCheckResourceAttr(resourceName, 
"tags.%", "2"), + resource.TestCheckResourceAttr(resourceName, "tags.tag1", "tag1value"), + resource.TestCheckResourceAttr(resourceName, "tags.tag2", "tag2value"), + ), + }, + { + ResourceName: resourceName, + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccProjectConfig_tags(rName, "tag2", "tag2value-updated"), + Check: resource.ComposeTestCheckFunc( + testAccCheckProjectExists(ctx, resourceName, &project), + resource.TestCheckResourceAttr(resourceName, "tags.%", "2"), + resource.TestCheckResourceAttr(resourceName, "tags.tag1", "tag1value"), + resource.TestCheckResourceAttr(resourceName, "tags.tag2", "tag2value-updated"), + ), + }, + }, + }) +} + func TestAccCodeBuildProject_publicVisibility(t *testing.T) { ctx := acctest.Context(t) var project types.Project @@ -126,7 +226,11 @@ func TestAccCodeBuildProject_publicVisibility(t *testing.T) { roleResourceName := "aws_iam_role.test" resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, + PreCheck: func() { + acctest.PreCheck(ctx, t) + testAccPreCheck(ctx, t) + testAccPreCheckSourceCredentialsForServerType(ctx, t, types.ServerTypeGithub) + }, ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), @@ -170,7 +274,11 @@ func TestAccCodeBuildProject_badgeEnabled(t *testing.T) { resourceName := "aws_codebuild_project.test" resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, + PreCheck: func() { + acctest.PreCheck(ctx, t) + testAccPreCheck(ctx, t) + testAccPreCheckSourceCredentialsForServerType(ctx, t, types.ServerTypeGithub) + }, ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), @@ -199,7 +307,11 @@ func TestAccCodeBuildProject_buildTimeout(t *testing.T) { resourceName := "aws_codebuild_project.test" resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, + PreCheck: func() { + acctest.PreCheck(ctx, t) + testAccPreCheck(ctx, t) + testAccPreCheckSourceCredentialsForServerType(ctx, t, types.ServerTypeGithub) + }, ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), @@ -234,7 +346,11 @@ func TestAccCodeBuildProject_queuedTimeout(t *testing.T) { resourceName := "aws_codebuild_project.test" resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, + PreCheck: func() { + acctest.PreCheck(ctx, t) + testAccPreCheck(ctx, t) + testAccPreCheckSourceCredentialsForServerType(ctx, t, types.ServerTypeGithub) + }, ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), @@ -271,7 +387,11 @@ func TestAccCodeBuildProject_cache(t *testing.T) { s3Location2 := rName + "-2" resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, + PreCheck: func() { + acctest.PreCheck(ctx, t) + testAccPreCheck(ctx, t) + testAccPreCheckSourceCredentialsForServerType(ctx, t, types.ServerTypeGithub) + }, ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: 
acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), @@ -355,7 +475,11 @@ func TestAccCodeBuildProject_description(t *testing.T) { resourceName := "aws_codebuild_project.test" resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, + PreCheck: func() { + acctest.PreCheck(ctx, t) + testAccPreCheck(ctx, t) + testAccPreCheckSourceCredentialsForServerType(ctx, t, types.ServerTypeGithub) + }, ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), @@ -390,7 +514,11 @@ func TestAccCodeBuildProject_fileSystemLocations(t *testing.T) { resourceName := "aws_codebuild_project.test" resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, + PreCheck: func() { + acctest.PreCheck(ctx, t) + testAccPreCheck(ctx, t) + testAccPreCheckSourceCredentialsForServerType(ctx, t, types.ServerTypeGithub) + }, ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID, "efs"), //using efs.EndpointsID will import efs and make linters sad ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), @@ -441,7 +569,11 @@ func TestAccCodeBuildProject_sourceVersion(t *testing.T) { resourceName := "aws_codebuild_project.test" resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, + PreCheck: func() { + acctest.PreCheck(ctx, t) + testAccPreCheck(ctx, t) + testAccPreCheckSourceCredentialsForServerType(ctx, t, types.ServerTypeGithub) + }, ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), @@ -464,7 +596,11 @@ func TestAccCodeBuildProject_encryptionKey(t *testing.T) { resourceName := "aws_codebuild_project.test" resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, + PreCheck: func() { + acctest.PreCheck(ctx, t) + testAccPreCheck(ctx, t) + testAccPreCheckSourceCredentialsForServerType(ctx, t, types.ServerTypeGithub) + }, ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), @@ -492,7 +628,11 @@ func TestAccCodeBuildProject_Environment_environmentVariable(t *testing.T) { resourceName := "aws_codebuild_project.test" resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, + PreCheck: func() { + acctest.PreCheck(ctx, t) + testAccPreCheck(ctx, t) + testAccPreCheckSourceCredentialsForServerType(ctx, t, types.ServerTypeGithub) + }, ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), @@ -542,7 +682,11 @@ func TestAccCodeBuildProject_EnvironmentEnvironmentVariable_type(t *testing.T) { resourceName := "aws_codebuild_project.test" resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, + PreCheck: func() { + acctest.PreCheck(ctx, t) + testAccPreCheck(ctx, t) + testAccPreCheckSourceCredentialsForServerType(ctx, t, types.ServerTypeGithub) + }, ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: 
acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), @@ -587,7 +731,11 @@ func TestAccCodeBuildProject_EnvironmentEnvironmentVariable_value(t *testing.T) resourceName := "aws_codebuild_project.test" resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, + PreCheck: func() { + acctest.PreCheck(ctx, t) + testAccPreCheck(ctx, t) + testAccPreCheckSourceCredentialsForServerType(ctx, t, types.ServerTypeGithub) + }, ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), @@ -637,7 +785,11 @@ func TestAccCodeBuildProject_Environment_certificate(t *testing.T) { resourceName := "aws_codebuild_project.test" resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, + PreCheck: func() { + acctest.PreCheck(ctx, t) + testAccPreCheck(ctx, t) + testAccPreCheckSourceCredentialsForServerType(ctx, t, types.ServerTypeGithub) + }, ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), @@ -658,6 +810,43 @@ func TestAccCodeBuildProject_Environment_certificate(t *testing.T) { }) } +func TestAccCodeBuildProject_Environment_registryCredential(t *testing.T) { + ctx := acctest.Context(t) + var project types.Project + rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) + resourceName := "aws_codebuild_project.test" + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { + acctest.PreCheck(ctx, t) + testAccPreCheck(ctx, t) + testAccPreCheckSourceCredentialsForServerType(ctx, t, types.ServerTypeGithub) + }, + ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, + CheckDestroy: testAccCheckProjectDestroy(ctx), + Steps: []resource.TestStep{ + { + Config: testAccProjectConfig_environmentRegistryCredential1(rName), + Check: resource.ComposeTestCheckFunc( + testAccCheckProjectExists(ctx, resourceName, &project), + ), + }, + { + ResourceName: resourceName, + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccProjectConfig_environmentRegistryCredential2(rName), + Check: resource.ComposeTestCheckFunc( + testAccCheckProjectExists(ctx, resourceName, &project), + ), + }, + }, + }) +} + func TestAccCodeBuildProject_Logs_cloudWatchLogs(t *testing.T) { ctx := acctest.Context(t) var project types.Project @@ -665,7 +854,11 @@ func TestAccCodeBuildProject_Logs_cloudWatchLogs(t *testing.T) { resourceName := "aws_codebuild_project.test" resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, + PreCheck: func() { + acctest.PreCheck(ctx, t) + testAccPreCheck(ctx, t) + testAccPreCheckSourceCredentialsForServerType(ctx, t, types.ServerTypeGithub) + }, ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), @@ -711,7 +904,11 @@ func TestAccCodeBuildProject_Logs_s3Logs(t *testing.T) { resourceName := "aws_codebuild_project.test" resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, + PreCheck: func() { + acctest.PreCheck(ctx, t) + testAccPreCheck(ctx, t) + testAccPreCheckSourceCredentialsForServerType(ctx, t, 
types.ServerTypeGithub) + }, ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), @@ -761,7 +958,11 @@ func TestAccCodeBuildProject_buildBatch(t *testing.T) { } resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, + PreCheck: func() { + acctest.PreCheck(ctx, t) + testAccPreCheck(ctx, t) + testAccPreCheckSourceCredentialsForServerType(ctx, t, types.ServerTypeGithub) + }, ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), @@ -806,7 +1007,11 @@ func TestAccCodeBuildProject_buildBatchConfigDelete(t *testing.T) { resourceName := "aws_codebuild_project.test" resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, + PreCheck: func() { + acctest.PreCheck(ctx, t) + testAccPreCheck(ctx, t) + testAccPreCheckSourceCredentialsForServerType(ctx, t, types.ServerTypeGithub) + }, ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), @@ -845,7 +1050,11 @@ func TestAccCodeBuildProject_Source_gitCloneDepth(t *testing.T) { resourceName := "aws_codebuild_project.test" resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, + PreCheck: func() { + acctest.PreCheck(ctx, t) + testAccPreCheck(ctx, t) + testAccPreCheckSourceCredentialsForServerType(ctx, t, types.ServerTypeGithub) + }, ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), @@ -917,7 +1126,11 @@ func TestAccCodeBuildProject_SourceGitSubmodules_gitHub(t *testing.T) { resourceName := "aws_codebuild_project.test" resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, + PreCheck: func() { + acctest.PreCheck(ctx, t) + testAccPreCheck(ctx, t) + testAccPreCheckSourceCredentialsForServerType(ctx, t, types.ServerTypeGithub) + }, ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), @@ -950,7 +1163,11 @@ func TestAccCodeBuildProject_SourceGitSubmodules_gitHubEnterprise(t *testing.T) resourceName := "aws_codebuild_project.test" resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, + PreCheck: func() { + acctest.PreCheck(ctx, t) + testAccPreCheck(ctx, t) + testAccPreCheckSourceCredentialsForServerType(ctx, t, types.ServerTypeGithubEnterprise) + }, ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), @@ -1045,7 +1262,11 @@ func TestAccCodeBuildProject_SecondarySourcesGitSubmodules_gitHub(t *testing.T) resourceName := "aws_codebuild_project.test" resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, + PreCheck: func() { + acctest.PreCheck(ctx, t) + testAccPreCheck(ctx, t) + testAccPreCheckSourceCredentialsForServerType(ctx, t, types.ServerTypeGithub) + }, ErrorCheck: acctest.ErrorCheck(t, 
names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), @@ -1078,7 +1299,11 @@ func TestAccCodeBuildProject_SecondarySourcesGitSubmodules_gitHubEnterprise(t *t resourceName := "aws_codebuild_project.test" resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, + PreCheck: func() { + acctest.PreCheck(ctx, t) + testAccPreCheck(ctx, t) + testAccPreCheckSourceCredentialsForServerType(ctx, t, types.ServerTypeGithubEnterprise) + }, ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), @@ -1197,26 +1422,27 @@ func TestAccCodeBuildProject_SecondarySourcesVersions(t *testing.T) { }) } -func TestAccCodeBuildProject_SourceBuildStatus_gitHubEnterprise(t *testing.T) { +func TestAccCodeBuildProject_Source_insecureSSL(t *testing.T) { ctx := acctest.Context(t) var project types.Project rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resourceName := "aws_codebuild_project.test" - if acctest.Partition() == "aws-us-gov" { - t.Skip("CodeBuild Project build status config is not supported in GovCloud partition") - } - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, + PreCheck: func() { + acctest.PreCheck(ctx, t) + testAccPreCheck(ctx, t) + testAccPreCheckSourceCredentialsForServerType(ctx, t, types.ServerTypeGithub) + }, ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), Steps: []resource.TestStep{ { - Config: testAccProjectConfig_sourceBuildStatusGitHubEnterprise(rName), + Config: testAccProjectConfig_sourceInsecureSSL(rName, true), Check: resource.ComposeTestCheckFunc( testAccCheckProjectExists(ctx, resourceName, &project), + resource.TestCheckResourceAttr(resourceName, "source.0.insecure_ssl", "true"), ), }, { @@ -1224,27 +1450,38 @@ func TestAccCodeBuildProject_SourceBuildStatus_gitHubEnterprise(t *testing.T) { ImportState: true, ImportStateVerify: true, }, + { + Config: testAccProjectConfig_sourceInsecureSSL(rName, false), + Check: resource.ComposeTestCheckFunc( + testAccCheckProjectExists(ctx, resourceName, &project), + resource.TestCheckResourceAttr(resourceName, "source.0.insecure_ssl", "false"), + ), + }, }, }) } -func TestAccCodeBuildProject_Source_insecureSSL(t *testing.T) { +func TestAccCodeBuildProject_SourceBuildStatus_gitHubEnterprise(t *testing.T) { ctx := acctest.Context(t) var project types.Project rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resourceName := "aws_codebuild_project.test" resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, + PreCheck: func() { + acctest.PreCheck(ctx, t) + testAccPreCheck(ctx, t) + testAccPreCheckSourceCredentialsForServerType(ctx, t, types.ServerTypeGithubEnterprise) + acctest.PreCheckPartitionNot(t, names.USGovCloudPartitionID) + }, ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), Steps: []resource.TestStep{ { - Config: testAccProjectConfig_sourceInsecureSSL(rName, true), + Config: testAccProjectConfig_sourceBuildStatusGitHubEnterprise(rName), Check: resource.ComposeTestCheckFunc( testAccCheckProjectExists(ctx, 
resourceName, &project), - resource.TestCheckResourceAttr(resourceName, "source.0.insecure_ssl", "true"), ), }, { @@ -1252,33 +1489,28 @@ func TestAccCodeBuildProject_Source_insecureSSL(t *testing.T) { ImportState: true, ImportStateVerify: true, }, - { - Config: testAccProjectConfig_sourceInsecureSSL(rName, false), - Check: resource.ComposeTestCheckFunc( - testAccCheckProjectExists(ctx, resourceName, &project), - resource.TestCheckResourceAttr(resourceName, "source.0.insecure_ssl", "false"), - ), - }, }, }) } -func TestAccCodeBuildProject_SourceReportBuildStatus_bitbucket(t *testing.T) { +func TestAccCodeBuildProject_SourceReportBuildStatus_gitHubEnterprise(t *testing.T) { ctx := acctest.Context(t) var project types.Project rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resourceName := "aws_codebuild_project.test" - sourceLocation := testAccBitbucketSourceLocationFromEnv() - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, + PreCheck: func() { + acctest.PreCheck(ctx, t) + testAccPreCheck(ctx, t) + testAccPreCheckSourceCredentialsForServerType(ctx, t, types.ServerTypeGithubEnterprise) + }, ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), Steps: []resource.TestStep{ { - Config: testAccProjectConfig_sourceReportBuildStatusBitbucket(rName, sourceLocation, true), + Config: testAccProjectConfig_sourceReportBuildStatusGitHubEnterprise(rName, true), Check: resource.ComposeTestCheckFunc( testAccCheckProjectExists(ctx, resourceName, &project), resource.TestCheckResourceAttr(resourceName, "source.0.report_build_status", "true"), @@ -1290,7 +1522,7 @@ func TestAccCodeBuildProject_SourceReportBuildStatus_bitbucket(t *testing.T) { ImportStateVerify: true, }, { - Config: testAccProjectConfig_sourceReportBuildStatusBitbucket(rName, sourceLocation, false), + Config: testAccProjectConfig_sourceReportBuildStatusGitHubEnterprise(rName, false), Check: resource.ComposeTestCheckFunc( testAccCheckProjectExists(ctx, resourceName, &project), resource.TestCheckResourceAttr(resourceName, "source.0.report_build_status", "false"), @@ -1300,20 +1532,26 @@ func TestAccCodeBuildProject_SourceReportBuildStatus_bitbucket(t *testing.T) { }) } -func TestAccCodeBuildProject_SourceReportBuildStatus_gitHub(t *testing.T) { +func TestAccCodeBuildProject_SourceReportBuildStatus_bitbucket(t *testing.T) { ctx := acctest.Context(t) var project types.Project rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resourceName := "aws_codebuild_project.test" + sourceLocation := testAccBitbucketSourceLocationFromEnv() + resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, + PreCheck: func() { + acctest.PreCheck(ctx, t) + testAccPreCheck(ctx, t) + testAccPreCheckSourceCredentialsForServerType(ctx, t, types.ServerTypeBitbucket) + }, ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), Steps: []resource.TestStep{ { - Config: testAccProjectConfig_sourceReportBuildStatusGitHub(rName, true), + Config: testAccProjectConfig_sourceReportBuildStatusBitbucket(rName, sourceLocation, true), Check: resource.ComposeTestCheckFunc( testAccCheckProjectExists(ctx, resourceName, &project), resource.TestCheckResourceAttr(resourceName, "source.0.report_build_status", "true"), 
@@ -1325,7 +1563,7 @@ func TestAccCodeBuildProject_SourceReportBuildStatus_gitHub(t *testing.T) { ImportStateVerify: true, }, { - Config: testAccProjectConfig_sourceReportBuildStatusGitHub(rName, false), + Config: testAccProjectConfig_sourceReportBuildStatusBitbucket(rName, sourceLocation, false), Check: resource.ComposeTestCheckFunc( testAccCheckProjectExists(ctx, resourceName, &project), resource.TestCheckResourceAttr(resourceName, "source.0.report_build_status", "false"), @@ -1335,20 +1573,24 @@ func TestAccCodeBuildProject_SourceReportBuildStatus_gitHub(t *testing.T) { }) } -func TestAccCodeBuildProject_SourceReportBuildStatus_gitHubEnterprise(t *testing.T) { +func TestAccCodeBuildProject_SourceReportBuildStatus_gitHub(t *testing.T) { ctx := acctest.Context(t) var project types.Project rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resourceName := "aws_codebuild_project.test" resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, + PreCheck: func() { + acctest.PreCheck(ctx, t) + testAccPreCheck(ctx, t) + testAccPreCheckSourceCredentialsForServerType(ctx, t, types.ServerTypeGithub) + }, ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), Steps: []resource.TestStep{ { - Config: testAccProjectConfig_sourceReportBuildStatusGitHubEnterprise(rName, true), + Config: testAccProjectConfig_sourceReportBuildStatusGitHub(rName, true), Check: resource.ComposeTestCheckFunc( testAccCheckProjectExists(ctx, resourceName, &project), resource.TestCheckResourceAttr(resourceName, "source.0.report_build_status", "true"), @@ -1360,7 +1602,7 @@ func TestAccCodeBuildProject_SourceReportBuildStatus_gitHubEnterprise(t *testing ImportStateVerify: true, }, { - Config: testAccProjectConfig_sourceReportBuildStatusGitHubEnterprise(rName, false), + Config: testAccProjectConfig_sourceReportBuildStatusGitHub(rName, false), Check: resource.ComposeTestCheckFunc( testAccCheckProjectExists(ctx, resourceName, &project), resource.TestCheckResourceAttr(resourceName, "source.0.report_build_status", "false"), @@ -1379,7 +1621,11 @@ func TestAccCodeBuildProject_SourceType_bitbucket(t *testing.T) { sourceLocation := testAccBitbucketSourceLocationFromEnv() resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, + PreCheck: func() { + acctest.PreCheck(ctx, t) + testAccPreCheck(ctx, t) + testAccPreCheckSourceCredentialsForServerType(ctx, t, types.ServerTypeBitbucket) + }, ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), @@ -1464,7 +1710,11 @@ func TestAccCodeBuildProject_SourceType_gitHubEnterprise(t *testing.T) { resourceName := "aws_codebuild_project.test" resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, + PreCheck: func() { + acctest.PreCheck(ctx, t) + testAccPreCheck(ctx, t) + testAccPreCheckSourceCredentialsForServerType(ctx, t, types.ServerTypeGithubEnterprise) + }, ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), @@ -1576,25 +1826,30 @@ phases: }) } -func TestAccCodeBuildProject_tags(t *testing.T) { +func TestAccCodeBuildProject_vpc(t *testing.T) { ctx := 
acctest.Context(t) var project types.Project rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resourceName := "aws_codebuild_project.test" resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, + PreCheck: func() { + acctest.PreCheck(ctx, t) + testAccPreCheck(ctx, t) + testAccPreCheckSourceCredentialsForServerType(ctx, t, types.ServerTypeGithub) + }, ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), Steps: []resource.TestStep{ { - Config: testAccProjectConfig_tags(rName, "tag2", "tag2value"), + Config: testAccProjectConfig_vpc2(rName), Check: resource.ComposeTestCheckFunc( testAccCheckProjectExists(ctx, resourceName, &project), - resource.TestCheckResourceAttr(resourceName, "tags.%", "2"), - resource.TestCheckResourceAttr(resourceName, "tags.tag1", "tag1value"), - resource.TestCheckResourceAttr(resourceName, "tags.tag2", "tag2value"), + resource.TestCheckResourceAttr(resourceName, "vpc_config.#", "1"), + resource.TestCheckResourceAttr(resourceName, "vpc_config.0.security_group_ids.#", "1"), + resource.TestCheckResourceAttr(resourceName, "vpc_config.0.subnets.#", "2"), + resource.TestMatchResourceAttr(resourceName, "vpc_config.0.vpc_id", regexache.MustCompile(`^vpc-`)), ), }, { @@ -1603,53 +1858,13 @@ func TestAccCodeBuildProject_tags(t *testing.T) { ImportStateVerify: true, }, { - Config: testAccProjectConfig_tags(rName, "tag2", "tag2value-updated"), + Config: testAccProjectConfig_vpc1(rName), Check: resource.ComposeTestCheckFunc( testAccCheckProjectExists(ctx, resourceName, &project), - resource.TestCheckResourceAttr(resourceName, "tags.%", "2"), - resource.TestCheckResourceAttr(resourceName, "tags.tag1", "tag1value"), - resource.TestCheckResourceAttr(resourceName, "tags.tag2", "tag2value-updated"), - ), - }, - }, - }) -} - -func TestAccCodeBuildProject_vpc(t *testing.T) { - ctx := acctest.Context(t) - var project types.Project - rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) - resourceName := "aws_codebuild_project.test" - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, - CheckDestroy: testAccCheckProjectDestroy(ctx), - Steps: []resource.TestStep{ - { - Config: testAccProjectConfig_vpc2(rName), - Check: resource.ComposeTestCheckFunc( - testAccCheckProjectExists(ctx, resourceName, &project), - resource.TestCheckResourceAttr(resourceName, "vpc_config.#", "1"), - resource.TestCheckResourceAttr(resourceName, "vpc_config.0.security_group_ids.#", "1"), - resource.TestCheckResourceAttr(resourceName, "vpc_config.0.subnets.#", "2"), - resource.TestMatchResourceAttr(resourceName, "vpc_config.0.vpc_id", regexache.MustCompile(`^vpc-`)), - ), - }, - { - ResourceName: resourceName, - ImportState: true, - ImportStateVerify: true, - }, - { - Config: testAccProjectConfig_vpc1(rName), - Check: resource.ComposeTestCheckFunc( - testAccCheckProjectExists(ctx, resourceName, &project), - resource.TestCheckResourceAttr(resourceName, "vpc_config.#", "1"), - resource.TestCheckResourceAttr(resourceName, "vpc_config.0.security_group_ids.#", "1"), - resource.TestCheckResourceAttr(resourceName, "vpc_config.0.subnets.#", "1"), - resource.TestMatchResourceAttr(resourceName, "vpc_config.0.vpc_id", 
regexache.MustCompile(`^vpc-`)), + resource.TestCheckResourceAttr(resourceName, "vpc_config.#", "1"), + resource.TestCheckResourceAttr(resourceName, "vpc_config.0.security_group_ids.#", "1"), + resource.TestCheckResourceAttr(resourceName, "vpc_config.0.subnets.#", "1"), + resource.TestMatchResourceAttr(resourceName, "vpc_config.0.vpc_id", regexache.MustCompile(`^vpc-`)), ), }, { @@ -1670,7 +1885,11 @@ func TestAccCodeBuildProject_windowsServer2019Container(t *testing.T) { resourceName := "aws_codebuild_project.test" resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, + PreCheck: func() { + acctest.PreCheck(ctx, t) + testAccPreCheck(ctx, t) + testAccPreCheckSourceCredentialsForServerType(ctx, t, types.ServerTypeGithub) + }, ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), @@ -1704,7 +1923,11 @@ func TestAccCodeBuildProject_armContainer(t *testing.T) { resourceName := "aws_codebuild_project.test" resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, + PreCheck: func() { + acctest.PreCheck(ctx, t) + testAccPreCheck(ctx, t) + testAccPreCheckSourceCredentialsForServerType(ctx, t, types.ServerTypeGithub) + }, ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), @@ -1731,7 +1954,11 @@ func TestAccCodeBuildProject_linuxLambdaContainer(t *testing.T) { resourceName := "aws_codebuild_project.test" resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, + PreCheck: func() { + acctest.PreCheck(ctx, t) + testAccPreCheck(ctx, t) + testAccPreCheckSourceCredentialsForServerType(ctx, t, types.ServerTypeGithub) + }, ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), @@ -1768,7 +1995,11 @@ func TestAccCodeBuildProject_Artifacts_artifactIdentifier(t *testing.T) { artifactIdentifier2 := "artifactIdentifier2" resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, + PreCheck: func() { + acctest.PreCheck(ctx, t) + testAccPreCheck(ctx, t) + testAccPreCheckSourceCredentialsForServerType(ctx, t, types.ServerTypeGithub) + }, ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), @@ -1805,7 +2036,11 @@ func TestAccCodeBuildProject_Artifacts_encryptionDisabled(t *testing.T) { resourceName := "aws_codebuild_project.test" resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, + PreCheck: func() { + acctest.PreCheck(ctx, t) + testAccPreCheck(ctx, t) + testAccPreCheckSourceCredentialsForServerType(ctx, t, types.ServerTypeGithub) + }, ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), @@ -1843,7 +2078,11 @@ func TestAccCodeBuildProject_Artifacts_location(t *testing.T) { resourceName := "aws_codebuild_project.test" resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, + 
PreCheck: func() { + acctest.PreCheck(ctx, t) + testAccPreCheck(ctx, t) + testAccPreCheckSourceCredentialsForServerType(ctx, t, types.ServerTypeGithub) + }, ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), @@ -1883,7 +2122,11 @@ func TestAccCodeBuildProject_Artifacts_name(t *testing.T) { name2 := "name2" resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, + PreCheck: func() { + acctest.PreCheck(ctx, t) + testAccPreCheck(ctx, t) + testAccPreCheckSourceCredentialsForServerType(ctx, t, types.ServerTypeGithub) + }, ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), @@ -1920,7 +2163,11 @@ func TestAccCodeBuildProject_Artifacts_namespaceType(t *testing.T) { resourceName := "aws_codebuild_project.test" resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, + PreCheck: func() { + acctest.PreCheck(ctx, t) + testAccPreCheck(ctx, t) + testAccPreCheckSourceCredentialsForServerType(ctx, t, types.ServerTypeGithub) + }, ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), @@ -1957,7 +2204,11 @@ func TestAccCodeBuildProject_Artifacts_overrideArtifactName(t *testing.T) { resourceName := "aws_codebuild_project.test" resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, + PreCheck: func() { + acctest.PreCheck(ctx, t) + testAccPreCheck(ctx, t) + testAccPreCheckSourceCredentialsForServerType(ctx, t, types.ServerTypeGithub) + }, ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), @@ -1994,7 +2245,11 @@ func TestAccCodeBuildProject_Artifacts_packaging(t *testing.T) { resourceName := "aws_codebuild_project.test" resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, + PreCheck: func() { + acctest.PreCheck(ctx, t) + testAccPreCheck(ctx, t) + testAccPreCheckSourceCredentialsForServerType(ctx, t, types.ServerTypeGithub) + }, ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), @@ -2031,7 +2286,11 @@ func TestAccCodeBuildProject_Artifacts_path(t *testing.T) { resourceName := "aws_codebuild_project.test" resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, + PreCheck: func() { + acctest.PreCheck(ctx, t) + testAccPreCheck(ctx, t) + testAccPreCheckSourceCredentialsForServerType(ctx, t, types.ServerTypeGithub) + }, ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), @@ -2145,7 +2404,11 @@ func TestAccCodeBuildProject_secondaryArtifacts(t *testing.T) { resourceName := "aws_codebuild_project.test" resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, + PreCheck: func() { + acctest.PreCheck(ctx, t) + testAccPreCheck(ctx, t) + 
testAccPreCheckSourceCredentialsForServerType(ctx, t, types.ServerTypeGithub) + }, ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), @@ -2183,7 +2446,11 @@ func TestAccCodeBuildProject_SecondaryArtifacts_artifactIdentifier(t *testing.T) artifactIdentifier2 := "artifactIdentifier2" resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, + PreCheck: func() { + acctest.PreCheck(ctx, t) + testAccPreCheck(ctx, t) + testAccPreCheckSourceCredentialsForServerType(ctx, t, types.ServerTypeGithub) + }, ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), @@ -2224,7 +2491,11 @@ func TestAccCodeBuildProject_SecondaryArtifacts_overrideArtifactName(t *testing. resourceName := "aws_codebuild_project.test" resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, + PreCheck: func() { + acctest.PreCheck(ctx, t) + testAccPreCheck(ctx, t) + testAccPreCheckSourceCredentialsForServerType(ctx, t, types.ServerTypeGithub) + }, ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), @@ -2265,7 +2536,11 @@ func TestAccCodeBuildProject_SecondaryArtifacts_encryptionDisabled(t *testing.T) resourceName := "aws_codebuild_project.test" resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, + PreCheck: func() { + acctest.PreCheck(ctx, t) + testAccPreCheck(ctx, t) + testAccPreCheckSourceCredentialsForServerType(ctx, t, types.ServerTypeGithub) + }, ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), @@ -2307,7 +2582,11 @@ func TestAccCodeBuildProject_SecondaryArtifacts_location(t *testing.T) { resourceName := "aws_codebuild_project.test" resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, + PreCheck: func() { + acctest.PreCheck(ctx, t) + testAccPreCheck(ctx, t) + testAccPreCheckSourceCredentialsForServerType(ctx, t, types.ServerTypeGithub) + }, ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), @@ -2354,7 +2633,11 @@ func TestAccCodeBuildProject_SecondaryArtifacts_name(t *testing.T) { name2 := "name2" resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, + PreCheck: func() { + acctest.PreCheck(ctx, t) + testAccPreCheck(ctx, t) + testAccPreCheckSourceCredentialsForServerType(ctx, t, types.ServerTypeGithub) + }, ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), @@ -2395,7 +2678,11 @@ func TestAccCodeBuildProject_SecondaryArtifacts_namespaceType(t *testing.T) { resourceName := "aws_codebuild_project.test" resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, + PreCheck: func() { + acctest.PreCheck(ctx, t) + testAccPreCheck(ctx, t) + 
testAccPreCheckSourceCredentialsForServerType(ctx, t, types.ServerTypeGithub) + }, ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), @@ -2429,25 +2716,32 @@ func TestAccCodeBuildProject_SecondaryArtifacts_namespaceType(t *testing.T) { }) } -func TestAccCodeBuildProject_SecondaryArtifacts_packaging(t *testing.T) { +func TestAccCodeBuildProject_SecondaryArtifacts_path(t *testing.T) { ctx := acctest.Context(t) var project types.Project rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resourceName := "aws_codebuild_project.test" + path1 := "path1" + path2 := "path2" + resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, + PreCheck: func() { + acctest.PreCheck(ctx, t) + testAccPreCheck(ctx, t) + testAccPreCheckSourceCredentialsForServerType(ctx, t, types.ServerTypeGithub) + }, ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), Steps: []resource.TestStep{ { - Config: testAccProjectConfig_secondaryArtifactsPackaging(rName, string(types.ArtifactPackagingZip)), + Config: testAccProjectConfig_secondaryArtifactsPath(rName, path1), Check: resource.ComposeTestCheckFunc( testAccCheckProjectExists(ctx, resourceName, &project), resource.TestCheckResourceAttr(resourceName, "secondary_artifacts.#", "1"), resource.TestCheckTypeSetElemNestedAttrs(resourceName, "secondary_artifacts.*", map[string]string{ - "packaging": string(types.ArtifactPackagingZip), + "path": path1, }), ), }, @@ -2457,12 +2751,12 @@ func TestAccCodeBuildProject_SecondaryArtifacts_packaging(t *testing.T) { ImportStateVerify: true, }, { - Config: testAccProjectConfig_secondaryArtifactsPackaging(rName, string(types.ArtifactPackagingNone)), + Config: testAccProjectConfig_secondaryArtifactsPath(rName, path2), Check: resource.ComposeTestCheckFunc( testAccCheckProjectExists(ctx, resourceName, &project), resource.TestCheckResourceAttr(resourceName, "secondary_artifacts.#", "1"), resource.TestCheckTypeSetElemNestedAttrs(resourceName, "secondary_artifacts.*", map[string]string{ - "packaging": string(types.ArtifactPackagingNone), + "path": path2, }), ), }, @@ -2470,28 +2764,29 @@ func TestAccCodeBuildProject_SecondaryArtifacts_packaging(t *testing.T) { }) } -func TestAccCodeBuildProject_SecondaryArtifacts_path(t *testing.T) { +func TestAccCodeBuildProject_SecondaryArtifacts_packaging(t *testing.T) { ctx := acctest.Context(t) var project types.Project rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resourceName := "aws_codebuild_project.test" - path1 := "path1" - path2 := "path2" - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, + PreCheck: func() { + acctest.PreCheck(ctx, t) + testAccPreCheck(ctx, t) + testAccPreCheckSourceCredentialsForServerType(ctx, t, types.ServerTypeGithub) + }, ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, CheckDestroy: testAccCheckProjectDestroy(ctx), Steps: []resource.TestStep{ { - Config: testAccProjectConfig_secondaryArtifactsPath(rName, path1), + Config: testAccProjectConfig_secondaryArtifactsPackaging(rName, string(types.ArtifactPackagingZip)), Check: resource.ComposeTestCheckFunc( testAccCheckProjectExists(ctx, resourceName, &project), 
resource.TestCheckResourceAttr(resourceName, "secondary_artifacts.#", "1"), resource.TestCheckTypeSetElemNestedAttrs(resourceName, "secondary_artifacts.*", map[string]string{ - "path": path1, + "packaging": string(types.ArtifactPackagingZip), }), ), }, @@ -2501,12 +2796,12 @@ func TestAccCodeBuildProject_SecondaryArtifacts_path(t *testing.T) { ImportStateVerify: true, }, { - Config: testAccProjectConfig_secondaryArtifactsPath(rName, path2), + Config: testAccProjectConfig_secondaryArtifactsPackaging(rName, string(types.ArtifactPackagingNone)), Check: resource.ComposeTestCheckFunc( testAccCheckProjectExists(ctx, resourceName, &project), resource.TestCheckResourceAttr(resourceName, "secondary_artifacts.#", "1"), resource.TestCheckTypeSetElemNestedAttrs(resourceName, "secondary_artifacts.*", map[string]string{ - "path": path2, + "packaging": string(types.ArtifactPackagingNone), }), ), }, @@ -2579,29 +2874,6 @@ func TestAccCodeBuildProject_SecondarySources_codeCommit(t *testing.T) { }) } -func TestProject_nameValidation(t *testing.T) { - t.Parallel() - - cases := []struct { - Value string - ErrCount int - }{ - {Value: "_test", ErrCount: 1}, - {Value: "test", ErrCount: 0}, - {Value: "1_test", ErrCount: 0}, - {Value: "test**1", ErrCount: 1}, - {Value: sdkacctest.RandString(256), ErrCount: 1}, - } - - for _, tc := range cases { - _, errors := tfcodebuild.ValidProjectName(tc.Value, "aws_codebuild_project") - - if len(errors) != tc.ErrCount { - t.Fatalf("Expected the AWS CodeBuild project name to trigger a validation error - %s", errors) - } - } -} - func TestAccCodeBuildProject_concurrentBuildLimit(t *testing.T) { ctx := acctest.Context(t) var project types.Project @@ -2609,7 +2881,11 @@ func TestAccCodeBuildProject_concurrentBuildLimit(t *testing.T) { resourceName := "aws_codebuild_project.test" resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, + PreCheck: func() { + acctest.PreCheck(ctx, t) + testAccPreCheck(ctx, t) + testAccPreCheckSourceCredentialsForServerType(ctx, t, types.ServerTypeGithub) + }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), CheckDestroy: testAccCheckProjectDestroy(ctx), @@ -2637,64 +2913,6 @@ func TestAccCodeBuildProject_concurrentBuildLimit(t *testing.T) { }) } -func TestAccCodeBuildProject_Environment_registryCredential(t *testing.T) { - ctx := acctest.Context(t) - var project types.Project - rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) - resourceName := "aws_codebuild_project.test" - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, - CheckDestroy: testAccCheckProjectDestroy(ctx), - Steps: []resource.TestStep{ - { - Config: testAccProjectConfig_environmentRegistryCredential1(rName), - Check: resource.ComposeTestCheckFunc( - testAccCheckProjectExists(ctx, resourceName, &project), - ), - }, - { - ResourceName: resourceName, - ImportState: true, - ImportStateVerify: true, - }, - { - Config: testAccProjectConfig_environmentRegistryCredential2(rName), - Check: resource.ComposeTestCheckFunc( - testAccCheckProjectExists(ctx, resourceName, &project), - ), - }, - }, - }) -} - -func TestAccCodeBuildProject_disappears(t *testing.T) { - ctx := acctest.Context(t) - var project types.Project - rName := 
sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) - - resourceName := "aws_codebuild_project.test" - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, names.CodeBuildEndpointID), - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, - CheckDestroy: testAccCheckProjectDestroy(ctx), - Steps: []resource.TestStep{ - { - Config: testAccProjectConfig_basic(rName), - Check: resource.ComposeTestCheckFunc( - testAccCheckProjectExists(ctx, resourceName, &project), - acctest.CheckResourceDisappears(ctx, acctest.Provider, tfcodebuild.ResourceProject(), resourceName), - ), - ExpectNonEmptyPlan: true, - }, - }, - }) -} - func testAccCheckProjectExists(ctx context.Context, n string, v *types.Project) resource.TestCheckFunc { return func(s *terraform.State) error { rs, ok := s.RootModule().Resources[n] @@ -2995,10 +3213,16 @@ resource "aws_codebuild_project" "test" { `, queuedTimeout, rName)) } -func testAccProjectConfig_s3ComputedLocation(rName string) string { +func testAccProjectConfig_cache(rName, cacheLocation, cacheType string) string { return acctest.ConfigCompose(testAccProjectConfig_Base_ServiceRole(rName), fmt.Sprintf(` -resource "aws_s3_bucket" "test" { - bucket_prefix = "cache" +resource "aws_s3_bucket" "test1" { + bucket = "%[1]s-1" + force_destroy = true +} + +resource "aws_s3_bucket" "test2" { + bucket = "%[1]s-2" + force_destroy = true } resource "aws_codebuild_project" "test" { @@ -3009,6 +3233,11 @@ resource "aws_codebuild_project" "test" { type = "NO_ARTIFACTS" } + cache { + location = %[2]q + type = %[3]q + } + environment { compute_type = "BUILD_GENERAL1_SMALL" image = "2" @@ -3020,26 +3249,13 @@ resource "aws_codebuild_project" "test" { location = "https://github.com/hashicorp/packer.git" } - cache { - type = "S3" - location = aws_s3_bucket.test.bucket - } + depends_on = [aws_s3_bucket.test1, aws_s3_bucket.test2] } -`, rName)) +`, rName, cacheLocation, cacheType)) } -func testAccProjectConfig_cache(rName, cacheLocation, cacheType string) string { +func testAccProjectConfig_localCache(rName, modeType string) string { return acctest.ConfigCompose(testAccProjectConfig_Base_ServiceRole(rName), fmt.Sprintf(` -resource "aws_s3_bucket" "test1" { - bucket = "%[1]s-1" - force_destroy = true -} - -resource "aws_s3_bucket" "test2" { - bucket = "%[1]s-2" - force_destroy = true -} - resource "aws_codebuild_project" "test" { name = %[1]q service_role = aws_iam_role.test.arn @@ -3049,8 +3265,8 @@ resource "aws_codebuild_project" "test" { } cache { - location = %[2]q - type = %[3]q + type = "LOCAL" + modes = [%[2]q] } environment { @@ -3063,14 +3279,16 @@ resource "aws_codebuild_project" "test" { type = "GITHUB" location = "https://github.com/hashicorp/packer.git" } - - depends_on = [aws_s3_bucket.test1, aws_s3_bucket.test2] } -`, rName, cacheLocation, cacheType)) +`, rName, modeType)) } -func testAccProjectConfig_localCache(rName, modeType string) string { +func testAccProjectConfig_s3ComputedLocation(rName string) string { return acctest.ConfigCompose(testAccProjectConfig_Base_ServiceRole(rName), fmt.Sprintf(` +resource "aws_s3_bucket" "test" { + bucket_prefix = "cache" +} + resource "aws_codebuild_project" "test" { name = %[1]q service_role = aws_iam_role.test.arn @@ -3079,11 +3297,6 @@ resource "aws_codebuild_project" "test" { type = "NO_ARTIFACTS" } - cache { - type = "LOCAL" - modes = [%[2]q] - } - environment { compute_type = "BUILD_GENERAL1_SMALL" image = "2" @@ -3094,8 
+3307,13 @@ resource "aws_codebuild_project" "test" { type = "GITHUB" location = "https://github.com/hashicorp/packer.git" } + + cache { + type = "S3" + location = aws_s3_bucket.test.bucket + } } -`, rName, modeType)) +`, rName)) } func testAccProjectConfig_description(rName, description string) string { @@ -3123,6 +3341,153 @@ resource "aws_codebuild_project" "test" { `, description, rName)) } +func testAccProjectConfig_fileSystemLocations(rName, mountPoint string) string { + return acctest.ConfigCompose( + testAccProjectConfig_Base_ServiceRole(rName), + acctest.ConfigAvailableAZsNoOptIn(), + fmt.Sprintf(` +resource "aws_efs_file_system" "test" { + tags = { + Name = %[1]q + } +} + +resource "aws_vpc" "test" { + cidr_block = "10.0.0.0/16" + + tags = { + Name = %[1]q + } +} + +resource "aws_internet_gateway" "test" { + vpc_id = aws_vpc.test.id + + tags = { + Name = %[1]q + } +} + +resource "aws_subnet" "public" { + availability_zone = data.aws_availability_zones.available.names[0] + cidr_block = "10.0.0.0/24" + vpc_id = aws_vpc.test.id + + tags = { + Name = "%[1]s-public" + } +} + +resource "aws_route_table" "public" { + vpc_id = aws_vpc.test.id + + tags = { + Name = "%[1]s-public" + } +} + +resource "aws_route_table_association" "public" { + route_table_id = aws_route_table.public.id + subnet_id = aws_subnet.public.id +} + +resource "aws_route" "public" { + route_table_id = aws_route_table.public.id + destination_cidr_block = "0.0.0.0/0" + gateway_id = aws_internet_gateway.test.id +} + +resource "aws_subnet" "private" { + availability_zone = data.aws_availability_zones.available.names[0] + cidr_block = "10.0.1.0/24" + vpc_id = aws_vpc.test.id + + tags = { + Name = "%[1]s-private" + } +} + +resource "aws_eip" "test" { + domain = "vpc" + + tags = { + Name = %[1]q + } +} + +resource "aws_nat_gateway" "test" { + allocation_id = aws_eip.test.id + subnet_id = aws_subnet.public.id + + tags = { + Name = %[1]q + } + + depends_on = [aws_route.public] +} + +resource "aws_route_table" "private" { + vpc_id = aws_vpc.test.id + + tags = { + Name = "%[1]s-private" + } +} + +resource "aws_route_table_association" "private" { + route_table_id = aws_route.private.route_table_id + subnet_id = aws_subnet.private.id +} + +resource "aws_route" "private" { + route_table_id = aws_route_table.private.id + destination_cidr_block = "0.0.0.0/0" + nat_gateway_id = aws_nat_gateway.test.id +} + +resource "aws_security_group" "test" { + name = %[1]q + vpc_id = aws_vpc.test.id +} + +resource "aws_codebuild_project" "test" { + name = %[1]q + service_role = aws_iam_role.test.arn + + artifacts { + type = "NO_ARTIFACTS" + } + + environment { + compute_type = "BUILD_GENERAL1_SMALL" + image = "2" + type = "LINUX_CONTAINER" + + privileged_mode = true + } + + source { + type = "GITHUB" + location = "https://github.com/hashicorp/packer.git" + } + + vpc_config { + security_group_ids = [aws_security_group.test.id] + subnets = [aws_subnet.private.id] + vpc_id = aws_vpc.test.id + } + + file_system_locations { + identifier = "test" + location = "${aws_efs_file_system.test.dns_name}:/directory-path" + type = "EFS" + mount_point = %[2]q + mount_options = "nfsvers=4.1,rsize=1048576,wsize=1048576,hard,timeo=450,retrans=3" + } +} +`, rName, mountPoint)) +} + func testAccProjectConfig_sourceVersion(rName, sourceVersion string) string { return acctest.ConfigCompose(testAccProjectConfig_Base_ServiceRole(rName), fmt.Sprintf(` resource "aws_codebuild_project" "test" { @@ -3451,6 +3816,45 @@ resource "aws_codebuild_project" "test" { `, 
rName, status, gName, sName)) } +func testAccProjectConfig_s3Logs(rName, status, location string, encryptionDisabled bool) string { + return acctest.ConfigCompose( + testAccProjectConfig_Base_ServiceRole(rName), + fmt.Sprintf(` +resource "aws_s3_bucket" "test" { + bucket = %[1]q + force_destroy = true +} + +resource "aws_codebuild_project" "test" { + name = %[1]q + service_role = aws_iam_role.test.arn + + artifacts { + type = "NO_ARTIFACTS" + } + + environment { + compute_type = "BUILD_GENERAL1_SMALL" + image = "2" + type = "LINUX_CONTAINER" + } + + source { + location = "https://github.com/hashicorp/packer.git" + type = "GITHUB" + } + + logs_config { + s3_logs { + status = %[2]q + location = %[3]q + encryption_disabled = %[4]t + } + } +} +`, rName, status, location, encryptionDisabled)) +} + func testAccProjectConfig_buildBatch(rName string, combineArtifacts bool, computeTypesAllowed string, maximumBuildsAllowed, timeoutInMins int) string { return acctest.ConfigCompose(testAccProjectConfig_Base_ServiceRole(rName), fmt.Sprintf(` resource "aws_codebuild_project" "test" { @@ -3519,56 +3923,17 @@ build_batch_config { restrictions { compute_types_allowed = [] maximum_builds_allowed = 10 - } - - service_role = aws_iam_role.test.arn - timeout_in_mins = 480 -} - ` - - if withBuildBatchConfig { - return acctest.ConfigCompose(testAccProjectConfig_Base_ServiceRole(rName), fmt.Sprintf(template, rName, buildBatchConfig)) - } - return acctest.ConfigCompose(testAccProjectConfig_Base_ServiceRole(rName), fmt.Sprintf(template, rName, "")) -} - -func testAccProjectConfig_s3Logs(rName, status, location string, encryptionDisabled bool) string { - return acctest.ConfigCompose( - testAccProjectConfig_Base_ServiceRole(rName), - fmt.Sprintf(` -resource "aws_s3_bucket" "test" { - bucket = %[1]q - force_destroy = true -} - -resource "aws_codebuild_project" "test" { - name = %[1]q - service_role = aws_iam_role.test.arn - - artifacts { - type = "NO_ARTIFACTS" - } - - environment { - compute_type = "BUILD_GENERAL1_SMALL" - image = "2" - type = "LINUX_CONTAINER" - } - - source { - location = "https://github.com/hashicorp/packer.git" - type = "GITHUB" - } + } - logs_config { - s3_logs { - status = %[2]q - location = %[3]q - encryption_disabled = %[4]t - } - } + service_role = aws_iam_role.test.arn + timeout_in_mins = 480 } -`, rName, status, location, encryptionDisabled)) + ` + + if withBuildBatchConfig { + return acctest.ConfigCompose(testAccProjectConfig_Base_ServiceRole(rName), fmt.Sprintf(template, rName, buildBatchConfig)) + } + return acctest.ConfigCompose(testAccProjectConfig_Base_ServiceRole(rName), fmt.Sprintf(template, rName, "")) } func testAccProjectConfig_sourceGitCloneDepth(rName string, gitCloneDepth int) string { @@ -3964,7 +4329,7 @@ resource "aws_codebuild_project" "test" { `, rName, insecureSSL)) } -func testAccProjectConfig_sourceReportBuildStatusBitbucket(rName, sourceLocation string, reportBuildStatus bool) string { +func testAccProjectConfig_sourceBuildStatusGitHubEnterprise(rName string) string { return acctest.ConfigCompose(testAccProjectConfig_Base_ServiceRole(rName), fmt.Sprintf(` resource "aws_codebuild_project" "test" { name = %[1]q @@ -3981,15 +4346,19 @@ resource "aws_codebuild_project" "test" { } source { - location = %[2]q - report_build_status = %[3]t - type = "BITBUCKET" + location = "https://example.com/organization/repository.git" + type = "GITHUB_ENTERPRISE" + + build_status_config { + context = "codebuild" + target_url = "https://example.com/$${CODEBUILD_BUILD_ID}" + } } } 
-`, rName, sourceLocation, reportBuildStatus)) +`, rName)) } -func testAccProjectConfig_sourceReportBuildStatusGitHub(rName string, reportBuildStatus bool) string { +func testAccProjectConfig_sourceReportBuildStatusGitHubEnterprise(rName string, reportBuildStatus bool) string { return acctest.ConfigCompose(testAccProjectConfig_Base_ServiceRole(rName), fmt.Sprintf(` resource "aws_codebuild_project" "test" { name = %[1]q @@ -4006,15 +4375,15 @@ resource "aws_codebuild_project" "test" { } source { - location = "https://github.com/hashicorp/packer.git" + location = "https://example.com/organization/repository.git" report_build_status = %[2]t - type = "GITHUB" + type = "GITHUB_ENTERPRISE" } } `, rName, reportBuildStatus)) } -func testAccProjectConfig_sourceReportBuildStatusGitHubEnterprise(rName string, reportBuildStatus bool) string { +func testAccProjectConfig_sourceReportBuildStatusBitbucket(rName, sourceLocation string, reportBuildStatus bool) string { return acctest.ConfigCompose(testAccProjectConfig_Base_ServiceRole(rName), fmt.Sprintf(` resource "aws_codebuild_project" "test" { name = %[1]q @@ -4031,9 +4400,34 @@ resource "aws_codebuild_project" "test" { } source { - location = "https://example.com/organization/repository.git" + location = %[2]q + report_build_status = %[3]t + type = "BITBUCKET" + } +} +`, rName, sourceLocation, reportBuildStatus)) +} + +func testAccProjectConfig_sourceReportBuildStatusGitHub(rName string, reportBuildStatus bool) string { + return acctest.ConfigCompose(testAccProjectConfig_Base_ServiceRole(rName), fmt.Sprintf(` +resource "aws_codebuild_project" "test" { + name = %[1]q + service_role = aws_iam_role.test.arn + + artifacts { + type = "NO_ARTIFACTS" + } + + environment { + compute_type = "BUILD_GENERAL1_SMALL" + image = "2" + type = "LINUX_CONTAINER" + } + + source { + location = "https://github.com/hashicorp/packer.git" report_build_status = %[2]t - type = "GITHUB_ENTERPRISE" + type = "GITHUB" } } `, rName, reportBuildStatus)) @@ -5047,7 +5441,7 @@ resource "aws_codebuild_project" "test" { `, rName, overrideArtifactName)) } -func testAccProjectConfig_secondaryArtifactsPackaging(rName, packaging string) string { +func testAccProjectConfig_secondaryArtifactsPath(rName, path string) string { return acctest.ConfigCompose( testAccProjectConfig_Base_ServiceRole(rName), fmt.Sprintf(` @@ -5067,7 +5461,7 @@ resource "aws_codebuild_project" "test" { secondary_artifacts { artifact_identifier = "secondaryArtifact1" - packaging = %[2]q + path = %[2]q location = aws_s3_bucket.test.bucket type = "S3" } @@ -5083,10 +5477,10 @@ resource "aws_codebuild_project" "test" { location = "https://github.com/hashicorp/packer.git" } } -`, rName, packaging)) +`, rName, path)) } -func testAccProjectConfig_secondaryArtifactsPath(rName, path string) string { +func testAccProjectConfig_secondaryArtifactsPackaging(rName, packaging string) string { return acctest.ConfigCompose( testAccProjectConfig_Base_ServiceRole(rName), fmt.Sprintf(` @@ -5106,7 +5500,7 @@ resource "aws_codebuild_project" "test" { secondary_artifacts { artifact_identifier = "secondaryArtifact1" - path = %[2]q + packaging = %[2]q location = aws_s3_bucket.test.bucket type = "S3" } @@ -5122,7 +5516,7 @@ resource "aws_codebuild_project" "test" { location = "https://github.com/hashicorp/packer.git" } } -`, rName, path)) +`, rName, packaging)) } func testAccProjectConfig_secondaryArtifactsType(rName string, artifactType string) string { @@ -5199,35 +5593,6 @@ resource "aws_codebuild_project" "test" { `, rName)) } 
-func testAccProjectConfig_sourceBuildStatusGitHubEnterprise(rName string) string { - return acctest.ConfigCompose(testAccProjectConfig_Base_ServiceRole(rName), fmt.Sprintf(` -resource "aws_codebuild_project" "test" { - name = %[1]q - service_role = aws_iam_role.test.arn - - artifacts { - type = "NO_ARTIFACTS" - } - - environment { - compute_type = "BUILD_GENERAL1_SMALL" - image = "2" - type = "LINUX_CONTAINER" - } - - source { - location = "https://example.com/organization/repository.git" - type = "GITHUB_ENTERPRISE" - - build_status_config { - context = "codebuild" - target_url = "https://example.com/$${CODEBUILD_BUILD_ID}" - } - } -} -`, rName)) -} - func testAccProjectConfig_concurrentBuildLimit(rName string, concurrentBuildLimit int) string { return acctest.ConfigCompose(testAccProjectConfig_Base_ServiceRole(rName), fmt.Sprintf(` resource "aws_codebuild_project" "test" { @@ -5252,150 +5617,3 @@ resource "aws_codebuild_project" "test" { } `, concurrentBuildLimit, rName)) } - -func testAccProjectConfig_fileSystemLocations(rName, mountPoint string) string { - return acctest.ConfigCompose( - testAccProjectConfig_Base_ServiceRole(rName), - acctest.ConfigAvailableAZsNoOptIn(), - fmt.Sprintf(` -resource "aws_efs_file_system" "test" { - tags = { - Name = %[1]q - } -} - -resource "aws_vpc" "test" { - cidr_block = "10.0.0.0/16" - - tags = { - Name = %[1]q - } -} - -resource "aws_internet_gateway" "test" { - vpc_id = aws_vpc.test.id - - tags = { - Name = %[1]q - } -} - -resource "aws_subnet" "public" { - availability_zone = data.aws_availability_zones.available.names[0] - cidr_block = "10.0.0.0/24" - vpc_id = aws_vpc.test.id - - tags = { - Name = "%[1]s-public" - } -} - -resource "aws_route_table" "public" { - vpc_id = aws_vpc.test.id - - tags = { - Name = "%[1]s-public" - } -} - -resource "aws_route_table_association" "public" { - route_table_id = aws_route_table.public.id - subnet_id = aws_subnet.public.id -} - -resource "aws_route" "public" { - route_table_id = aws_route_table.public.id - destination_cidr_block = "0.0.0.0/0" - gateway_id = aws_internet_gateway.test.id -} - -resource "aws_subnet" "private" { - availability_zone = data.aws_availability_zones.available.names[0] - cidr_block = "10.0.1.0/24" - vpc_id = aws_vpc.test.id - - tags = { - Name = "%[1]s-private" - } -} - -resource "aws_eip" "test" { - domain = "vpc" - - tags = { - Name = %[1]q - } -} - -resource "aws_nat_gateway" "test" { - allocation_id = aws_eip.test.id - subnet_id = aws_subnet.public.id - - tags = { - Name = %[1]q - } - - depends_on = [aws_route.public] -} - -resource "aws_route_table" "private" { - vpc_id = aws_vpc.test.id - - tags = { - Name = "%[1]s-private" - } -} - -resource "aws_route_table_association" "private" { - route_table_id = aws_route.private.route_table_id - subnet_id = aws_subnet.private.id -} - -resource "aws_route" "private" { - route_table_id = aws_route_table.private.id - destination_cidr_block = "0.0.0.0/0" - nat_gateway_id = aws_nat_gateway.test.id -} - -resource "aws_security_group" "test" { - name = %[1]q - vpc_id = aws_vpc.test.id -} - -resource "aws_codebuild_project" "test" { - name = %[1]q - service_role = aws_iam_role.test.arn - - artifacts { - type = "NO_ARTIFACTS" - } - - environment { - compute_type = "BUILD_GENERAL1_SMALL" - image = "2" - type = "LINUX_CONTAINER" - - privileged_mode = true - } - - source { - type = "GITHUB" - location = "https://github.com/hashicorp/packer.git" - } - - vpc_config { - security_group_ids = [aws_security_group.test.id] - subnets = 
[aws_subnet.private.id] - vpc_id = aws_vpc.test.id - } - - file_system_locations { - identifier = "test" - location = "${aws_efs_file_system.test.dns_name}:/directory-path" - type = "EFS" - mount_point = %[2]q - mount_options = "nfsvers=4.1,rsize=1048576,wsize=1048576,hard,timeo=450,retrans=3" - } -} -`, rName, mountPoint)) -} From 00bfe9fbff6410766d0fdd596ec0996396257f1c Mon Sep 17 00:00:00 2001 From: Kit Ewbank Date: Tue, 23 Jan 2024 11:05:14 -0500 Subject: [PATCH 21/30] Tidy up 'TestAccCodeBuildProject_tags'. --- internal/service/codebuild/project_test.go | 259 ++++++++++++--------- internal/service/codebuild/webhook_test.go | 2 +- 2 files changed, 147 insertions(+), 114 deletions(-) diff --git a/internal/service/codebuild/project_test.go b/internal/service/codebuild/project_test.go index fd6330be256..8d9220cca75 100644 --- a/internal/service/codebuild/project_test.go +++ b/internal/service/codebuild/project_test.go @@ -173,7 +173,6 @@ func TestAccCodeBuildProject_disappears(t *testing.T) { }) } -// TODO func TestAccCodeBuildProject_tags(t *testing.T) { ctx := acctest.Context(t) var project types.Project @@ -191,12 +190,11 @@ func TestAccCodeBuildProject_tags(t *testing.T) { CheckDestroy: testAccCheckProjectDestroy(ctx), Steps: []resource.TestStep{ { - Config: testAccProjectConfig_tags(rName, "tag2", "tag2value"), + Config: testAccProjectConfig_tags1(rName, "key1", "value1"), Check: resource.ComposeTestCheckFunc( testAccCheckProjectExists(ctx, resourceName, &project), - resource.TestCheckResourceAttr(resourceName, "tags.%", "2"), - resource.TestCheckResourceAttr(resourceName, "tags.tag1", "tag1value"), - resource.TestCheckResourceAttr(resourceName, "tags.tag2", "tag2value"), + resource.TestCheckResourceAttr(resourceName, "tags.%", "1"), + resource.TestCheckResourceAttr(resourceName, "tags.key1", "value1"), ), }, { @@ -205,12 +203,20 @@ func TestAccCodeBuildProject_tags(t *testing.T) { ImportStateVerify: true, }, { - Config: testAccProjectConfig_tags(rName, "tag2", "tag2value-updated"), + Config: testAccProjectConfig_tags2(rName, "key1", "value1updated", "key2", "value2"), Check: resource.ComposeTestCheckFunc( testAccCheckProjectExists(ctx, resourceName, &project), resource.TestCheckResourceAttr(resourceName, "tags.%", "2"), - resource.TestCheckResourceAttr(resourceName, "tags.tag1", "tag1value"), - resource.TestCheckResourceAttr(resourceName, "tags.tag2", "tag2value-updated"), + resource.TestCheckResourceAttr(resourceName, "tags.key1", "value1updated"), + resource.TestCheckResourceAttr(resourceName, "tags.key2", "value2"), + ), + }, + { + Config: testAccProjectConfig_tags1(rName, "key2", "value2"), + Check: resource.ComposeTestCheckFunc( + testAccCheckProjectExists(ctx, resourceName, &project), + resource.TestCheckResourceAttr(resourceName, "tags.%", "1"), + resource.TestCheckResourceAttr(resourceName, "tags.key2", "value2"), ), }, }, @@ -2983,7 +2989,7 @@ func testAccPreCheck(ctx context.Context, t *testing.T) { } } -func testAccProjectConfig_Base_ServiceRole(rName string) string { +func testAccProjectConfig_baseServiceRole(rName string) string { return fmt.Sprintf(` resource "aws_iam_role" "test" { name = %[1]q @@ -3064,7 +3070,7 @@ POLICY } func testAccProjectConfig_basic(rName string) string { - return acctest.ConfigCompose(testAccProjectConfig_Base_ServiceRole(rName), fmt.Sprintf(` + return acctest.ConfigCompose(testAccProjectConfig_baseServiceRole(rName), fmt.Sprintf(` resource "aws_codebuild_project" "test" { name = %[1]q service_role = aws_iam_role.test.arn @@ -3087,8 +3093,65 
@@ resource "aws_codebuild_project" "test" { `, rName, testAccGitHubSourceLocationFromEnv())) } +func testAccProjectConfig_tags1(rName, tagKey1, tagValue1 string) string { + return acctest.ConfigCompose(testAccProjectConfig_baseServiceRole(rName), fmt.Sprintf(` +resource "aws_codebuild_project" "test" { + name = %[1]q + service_role = aws_iam_role.test.arn + + artifacts { + type = "NO_ARTIFACTS" + } + + environment { + compute_type = "BUILD_GENERAL1_SMALL" + image = "2" + type = "LINUX_CONTAINER" + } + + source { + location = "https://github.com/hashicorp/packer.git" + type = "GITHUB" + } + + tags = { + %[2]q = %[3]q + } +} +`, rName, tagKey1, tagValue1)) +} + +func testAccProjectConfig_tags2(rName, tagKey1, tagValue1, tagKey2, tagValue2 string) string { + return acctest.ConfigCompose(testAccProjectConfig_baseServiceRole(rName), fmt.Sprintf(` +resource "aws_codebuild_project" "test" { + name = %[1]q + service_role = aws_iam_role.test.arn + + artifacts { + type = "NO_ARTIFACTS" + } + + environment { + compute_type = "BUILD_GENERAL1_SMALL" + image = "2" + type = "LINUX_CONTAINER" + } + + source { + location = "https://github.com/hashicorp/packer.git" + type = "GITHUB" + } + + tags = { + %[2]q = %[3]q + %[4]q = %[5]q + } +} +`, rName, tagKey1, tagValue1, tagKey2, tagValue2)) +} + func testAccProjectConfig_visibility(rName, visibility string) string { - return acctest.ConfigCompose(testAccProjectConfig_Base_ServiceRole(rName), fmt.Sprintf(` + return acctest.ConfigCompose(testAccProjectConfig_baseServiceRole(rName), fmt.Sprintf(` resource "aws_codebuild_project" "test" { name = %[1]q service_role = aws_iam_role.test.arn @@ -3113,7 +3176,7 @@ resource "aws_codebuild_project" "test" { } func testAccProjectConfig_visibilityResourceRole(rName, visibility string) string { - return acctest.ConfigCompose(testAccProjectConfig_Base_ServiceRole(rName), fmt.Sprintf(` + return acctest.ConfigCompose(testAccProjectConfig_baseServiceRole(rName), fmt.Sprintf(` resource "aws_codebuild_project" "test" { name = %[1]q service_role = aws_iam_role.test.arn @@ -3139,7 +3202,7 @@ resource "aws_codebuild_project" "test" { } func testAccProjectConfig_badgeEnabled(rName string, badgeEnabled bool) string { - return acctest.ConfigCompose(testAccProjectConfig_Base_ServiceRole(rName), fmt.Sprintf(` + return acctest.ConfigCompose(testAccProjectConfig_baseServiceRole(rName), fmt.Sprintf(` resource "aws_codebuild_project" "test" { badge_enabled = %[1]t name = %[2]q @@ -3164,7 +3227,7 @@ resource "aws_codebuild_project" "test" { } func testAccProjectConfig_buildTimeout(rName string, buildTimeout int) string { - return acctest.ConfigCompose(testAccProjectConfig_Base_ServiceRole(rName), fmt.Sprintf(` + return acctest.ConfigCompose(testAccProjectConfig_baseServiceRole(rName), fmt.Sprintf(` resource "aws_codebuild_project" "test" { build_timeout = %[1]d name = %[2]q @@ -3189,7 +3252,7 @@ resource "aws_codebuild_project" "test" { } func testAccProjectConfig_queuedTimeout(rName string, queuedTimeout int) string { - return acctest.ConfigCompose(testAccProjectConfig_Base_ServiceRole(rName), fmt.Sprintf(` + return acctest.ConfigCompose(testAccProjectConfig_baseServiceRole(rName), fmt.Sprintf(` resource "aws_codebuild_project" "test" { queued_timeout = %[1]d name = %[2]q @@ -3214,7 +3277,7 @@ resource "aws_codebuild_project" "test" { } func testAccProjectConfig_cache(rName, cacheLocation, cacheType string) string { - return acctest.ConfigCompose(testAccProjectConfig_Base_ServiceRole(rName), fmt.Sprintf(` + return 
acctest.ConfigCompose(testAccProjectConfig_baseServiceRole(rName), fmt.Sprintf(` resource "aws_s3_bucket" "test1" { bucket = "%[1]s-1" force_destroy = true @@ -3255,7 +3318,7 @@ resource "aws_codebuild_project" "test" { } func testAccProjectConfig_localCache(rName, modeType string) string { - return acctest.ConfigCompose(testAccProjectConfig_Base_ServiceRole(rName), fmt.Sprintf(` + return acctest.ConfigCompose(testAccProjectConfig_baseServiceRole(rName), fmt.Sprintf(` resource "aws_codebuild_project" "test" { name = %[1]q service_role = aws_iam_role.test.arn @@ -3284,7 +3347,7 @@ resource "aws_codebuild_project" "test" { } func testAccProjectConfig_s3ComputedLocation(rName string) string { - return acctest.ConfigCompose(testAccProjectConfig_Base_ServiceRole(rName), fmt.Sprintf(` + return acctest.ConfigCompose(testAccProjectConfig_baseServiceRole(rName), fmt.Sprintf(` resource "aws_s3_bucket" "test" { bucket_prefix = "cache" } @@ -3317,7 +3380,7 @@ resource "aws_codebuild_project" "test" { } func testAccProjectConfig_description(rName, description string) string { - return acctest.ConfigCompose(testAccProjectConfig_Base_ServiceRole(rName), fmt.Sprintf(` + return acctest.ConfigCompose(testAccProjectConfig_baseServiceRole(rName), fmt.Sprintf(` resource "aws_codebuild_project" "test" { description = %[1]q name = %[2]q @@ -3343,7 +3406,7 @@ resource "aws_codebuild_project" "test" { func testAccProjectConfig_fileSystemLocations(rName, mountPoint string) string { return acctest.ConfigCompose( - testAccProjectConfig_Base_ServiceRole(rName), + testAccProjectConfig_baseServiceRole(rName), acctest.ConfigAvailableAZsNoOptIn(), fmt.Sprintf(` resource "aws_efs_file_system" "test" { @@ -3489,7 +3552,7 @@ resource "aws_codebuild_project" "test" { } func testAccProjectConfig_sourceVersion(rName, sourceVersion string) string { - return acctest.ConfigCompose(testAccProjectConfig_Base_ServiceRole(rName), fmt.Sprintf(` + return acctest.ConfigCompose(testAccProjectConfig_baseServiceRole(rName), fmt.Sprintf(` resource "aws_codebuild_project" "test" { name = %[1]q service_role = aws_iam_role.test.arn @@ -3515,7 +3578,7 @@ resource "aws_codebuild_project" "test" { } func testAccProjectConfig_encryptionKey(rName string) string { - return acctest.ConfigCompose(testAccProjectConfig_Base_ServiceRole(rName), fmt.Sprintf(` + return acctest.ConfigCompose(testAccProjectConfig_baseServiceRole(rName), fmt.Sprintf(` resource "aws_kms_key" "test" { description = %[1]q deletion_window_in_days = 7 @@ -3545,7 +3608,7 @@ resource "aws_codebuild_project" "test" { } func testAccProjectConfig_environmentVariableOne(rName, key1, value1 string) string { - return acctest.ConfigCompose(testAccProjectConfig_Base_ServiceRole(rName), fmt.Sprintf(` + return acctest.ConfigCompose(testAccProjectConfig_baseServiceRole(rName), fmt.Sprintf(` resource "aws_codebuild_project" "test" { name = %[1]q service_role = aws_iam_role.test.arn @@ -3574,7 +3637,7 @@ resource "aws_codebuild_project" "test" { } func testAccProjectConfig_environmentVariableTwo(rName, key1, value1, key2, value2 string) string { - return acctest.ConfigCompose(testAccProjectConfig_Base_ServiceRole(rName), fmt.Sprintf(` + return acctest.ConfigCompose(testAccProjectConfig_baseServiceRole(rName), fmt.Sprintf(` resource "aws_codebuild_project" "test" { name = %[1]q service_role = aws_iam_role.test.arn @@ -3608,7 +3671,7 @@ resource "aws_codebuild_project" "test" { } func testAccProjectConfig_environmentVariableZero(rName string) string { - return 
acctest.ConfigCompose(testAccProjectConfig_Base_ServiceRole(rName), fmt.Sprintf(` + return acctest.ConfigCompose(testAccProjectConfig_baseServiceRole(rName), fmt.Sprintf(` resource "aws_codebuild_project" "test" { name = %[1]q service_role = aws_iam_role.test.arn @@ -3632,7 +3695,7 @@ resource "aws_codebuild_project" "test" { } func testAccProjectConfig_environmentVariableType(rName, environmentVariableType string) string { - return acctest.ConfigCompose(testAccProjectConfig_Base_ServiceRole(rName), fmt.Sprintf(` + return acctest.ConfigCompose(testAccProjectConfig_baseServiceRole(rName), fmt.Sprintf(` resource "aws_codebuild_project" "test" { name = %[1]q service_role = aws_iam_role.test.arn @@ -3668,7 +3731,7 @@ resource "aws_codebuild_project" "test" { func testAccProjectConfig_environmentCertificate(rName string, oName string) string { return acctest.ConfigCompose( - testAccProjectConfig_Base_ServiceRole(rName), + testAccProjectConfig_baseServiceRole(rName), fmt.Sprintf(` resource "aws_s3_bucket" "test" { bucket = %[2]q @@ -3705,7 +3768,7 @@ resource "aws_codebuild_project" "test" { } func testAccProjectConfig_environmentRegistryCredential1(rName string) string { - return acctest.ConfigCompose(testAccProjectConfig_Base_ServiceRole(rName), fmt.Sprintf(` + return acctest.ConfigCompose(testAccProjectConfig_baseServiceRole(rName), fmt.Sprintf(` resource "aws_codebuild_project" "test" { name = %[1]q service_role = aws_iam_role.test.arn @@ -3745,7 +3808,7 @@ resource "aws_secretsmanager_secret_version" "test" { } func testAccProjectConfig_environmentRegistryCredential2(rName string) string { - return acctest.ConfigCompose(testAccProjectConfig_Base_ServiceRole(rName), fmt.Sprintf(` + return acctest.ConfigCompose(testAccProjectConfig_baseServiceRole(rName), fmt.Sprintf(` resource "aws_codebuild_project" "test" { name = %[1]q service_role = aws_iam_role.test.arn @@ -3785,7 +3848,7 @@ resource "aws_secretsmanager_secret_version" "test" { } func testAccProjectConfig_cloudWatchLogs(rName, status, gName, sName string) string { - return acctest.ConfigCompose(testAccProjectConfig_Base_ServiceRole(rName), fmt.Sprintf(` + return acctest.ConfigCompose(testAccProjectConfig_baseServiceRole(rName), fmt.Sprintf(` resource "aws_codebuild_project" "test" { name = %[1]q service_role = aws_iam_role.test.arn @@ -3818,7 +3881,7 @@ resource "aws_codebuild_project" "test" { func testAccProjectConfig_s3Logs(rName, status, location string, encryptionDisabled bool) string { return acctest.ConfigCompose( - testAccProjectConfig_Base_ServiceRole(rName), + testAccProjectConfig_baseServiceRole(rName), fmt.Sprintf(` resource "aws_s3_bucket" "test" { bucket = %[1]q @@ -3856,7 +3919,7 @@ resource "aws_codebuild_project" "test" { } func testAccProjectConfig_buildBatch(rName string, combineArtifacts bool, computeTypesAllowed string, maximumBuildsAllowed, timeoutInMins int) string { - return acctest.ConfigCompose(testAccProjectConfig_Base_ServiceRole(rName), fmt.Sprintf(` + return acctest.ConfigCompose(testAccProjectConfig_baseServiceRole(rName), fmt.Sprintf(` resource "aws_codebuild_project" "test" { name = %[1]q service_role = aws_iam_role.test.arn @@ -3931,13 +3994,13 @@ build_batch_config { ` if withBuildBatchConfig { - return acctest.ConfigCompose(testAccProjectConfig_Base_ServiceRole(rName), fmt.Sprintf(template, rName, buildBatchConfig)) + return acctest.ConfigCompose(testAccProjectConfig_baseServiceRole(rName), fmt.Sprintf(template, rName, buildBatchConfig)) } - return 
acctest.ConfigCompose(testAccProjectConfig_Base_ServiceRole(rName), fmt.Sprintf(template, rName, "")) + return acctest.ConfigCompose(testAccProjectConfig_baseServiceRole(rName), fmt.Sprintf(template, rName, "")) } func testAccProjectConfig_sourceGitCloneDepth(rName string, gitCloneDepth int) string { - return acctest.ConfigCompose(testAccProjectConfig_Base_ServiceRole(rName), fmt.Sprintf(` + return acctest.ConfigCompose(testAccProjectConfig_baseServiceRole(rName), fmt.Sprintf(` resource "aws_codebuild_project" "test" { name = %[1]q service_role = aws_iam_role.test.arn @@ -3962,7 +4025,7 @@ resource "aws_codebuild_project" "test" { } func testAccProjectConfig_sourceGitSubmodulesCodeCommit(rName string, fetchSubmodules bool) string { - return acctest.ConfigCompose(testAccProjectConfig_Base_ServiceRole(rName), fmt.Sprintf(` + return acctest.ConfigCompose(testAccProjectConfig_baseServiceRole(rName), fmt.Sprintf(` resource "aws_codebuild_project" "test" { name = %[1]q service_role = aws_iam_role.test.arn @@ -3990,7 +4053,7 @@ resource "aws_codebuild_project" "test" { } func testAccProjectConfig_sourceGitSubmodulesGitHub(rName string, fetchSubmodules bool) string { - return acctest.ConfigCompose(testAccProjectConfig_Base_ServiceRole(rName), fmt.Sprintf(` + return acctest.ConfigCompose(testAccProjectConfig_baseServiceRole(rName), fmt.Sprintf(` resource "aws_codebuild_project" "test" { name = %[1]q service_role = aws_iam_role.test.arn @@ -4018,7 +4081,7 @@ resource "aws_codebuild_project" "test" { } func testAccProjectConfig_sourceGitSubmodulesGitHubEnterprise(rName string, fetchSubmodules bool) string { - return acctest.ConfigCompose(testAccProjectConfig_Base_ServiceRole(rName), fmt.Sprintf(` + return acctest.ConfigCompose(testAccProjectConfig_baseServiceRole(rName), fmt.Sprintf(` resource "aws_codebuild_project" "test" { name = %[1]q service_role = aws_iam_role.test.arn @@ -4046,7 +4109,7 @@ resource "aws_codebuild_project" "test" { } func testAccProjectConfig_secondarySourcesGitSubmodulesCodeCommit(rName string, fetchSubmodules bool) string { - return acctest.ConfigCompose(testAccProjectConfig_Base_ServiceRole(rName), fmt.Sprintf(` + return acctest.ConfigCompose(testAccProjectConfig_baseServiceRole(rName), fmt.Sprintf(` resource "aws_codebuild_project" "test" { name = %[1]q service_role = aws_iam_role.test.arn @@ -4094,7 +4157,7 @@ resource "aws_codebuild_project" "test" { } func testAccProjectConfig_secondarySourcesNone(rName string, fetchSubmodules bool) string { - return acctest.ConfigCompose(testAccProjectConfig_Base_ServiceRole(rName), fmt.Sprintf(` + return acctest.ConfigCompose(testAccProjectConfig_baseServiceRole(rName), fmt.Sprintf(` resource "aws_codebuild_project" "test" { name = %[1]q service_role = aws_iam_role.test.arn @@ -4122,7 +4185,7 @@ resource "aws_codebuild_project" "test" { } func testAccProjectConfig_secondarySourcesGitSubmodulesGitHub(rName string, fetchSubmodules bool) string { - return acctest.ConfigCompose(testAccProjectConfig_Base_ServiceRole(rName), fmt.Sprintf(` + return acctest.ConfigCompose(testAccProjectConfig_baseServiceRole(rName), fmt.Sprintf(` resource "aws_codebuild_project" "test" { name = %[1]q service_role = aws_iam_role.test.arn @@ -4170,7 +4233,7 @@ resource "aws_codebuild_project" "test" { } func testAccProjectConfig_secondarySourcesGitSubmodulesGitHubEnterprise(rName string, fetchSubmodules bool) string { - return acctest.ConfigCompose(testAccProjectConfig_Base_ServiceRole(rName), fmt.Sprintf(` + return 
acctest.ConfigCompose(testAccProjectConfig_baseServiceRole(rName), fmt.Sprintf(` resource "aws_codebuild_project" "test" { name = %[1]q service_role = aws_iam_role.test.arn @@ -4218,7 +4281,7 @@ resource "aws_codebuild_project" "test" { } func testAccProjectConfig_secondarySourceVersionsCodeCommit(rName string) string { - return acctest.ConfigCompose(testAccProjectConfig_Base_ServiceRole(rName), fmt.Sprintf(` + return acctest.ConfigCompose(testAccProjectConfig_baseServiceRole(rName), fmt.Sprintf(` resource "aws_codebuild_project" "test" { name = %[1]q service_role = aws_iam_role.test.arn @@ -4259,7 +4322,7 @@ resource "aws_codebuild_project" "test" { } func testAccProjectConfig_secondarySourceVersionsCodeCommitUpdated(rName string) string { - return acctest.ConfigCompose(testAccProjectConfig_Base_ServiceRole(rName), fmt.Sprintf(` + return acctest.ConfigCompose(testAccProjectConfig_baseServiceRole(rName), fmt.Sprintf(` resource "aws_codebuild_project" "test" { name = %[1]q service_role = aws_iam_role.test.arn @@ -4305,7 +4368,7 @@ resource "aws_codebuild_project" "test" { } func testAccProjectConfig_sourceInsecureSSL(rName string, insecureSSL bool) string { - return acctest.ConfigCompose(testAccProjectConfig_Base_ServiceRole(rName), fmt.Sprintf(` + return acctest.ConfigCompose(testAccProjectConfig_baseServiceRole(rName), fmt.Sprintf(` resource "aws_codebuild_project" "test" { name = %[1]q service_role = aws_iam_role.test.arn @@ -4330,7 +4393,7 @@ resource "aws_codebuild_project" "test" { } func testAccProjectConfig_sourceBuildStatusGitHubEnterprise(rName string) string { - return acctest.ConfigCompose(testAccProjectConfig_Base_ServiceRole(rName), fmt.Sprintf(` + return acctest.ConfigCompose(testAccProjectConfig_baseServiceRole(rName), fmt.Sprintf(` resource "aws_codebuild_project" "test" { name = %[1]q service_role = aws_iam_role.test.arn @@ -4359,7 +4422,7 @@ resource "aws_codebuild_project" "test" { } func testAccProjectConfig_sourceReportBuildStatusGitHubEnterprise(rName string, reportBuildStatus bool) string { - return acctest.ConfigCompose(testAccProjectConfig_Base_ServiceRole(rName), fmt.Sprintf(` + return acctest.ConfigCompose(testAccProjectConfig_baseServiceRole(rName), fmt.Sprintf(` resource "aws_codebuild_project" "test" { name = %[1]q service_role = aws_iam_role.test.arn @@ -4384,7 +4447,7 @@ resource "aws_codebuild_project" "test" { } func testAccProjectConfig_sourceReportBuildStatusBitbucket(rName, sourceLocation string, reportBuildStatus bool) string { - return acctest.ConfigCompose(testAccProjectConfig_Base_ServiceRole(rName), fmt.Sprintf(` + return acctest.ConfigCompose(testAccProjectConfig_baseServiceRole(rName), fmt.Sprintf(` resource "aws_codebuild_project" "test" { name = %[1]q service_role = aws_iam_role.test.arn @@ -4409,7 +4472,7 @@ resource "aws_codebuild_project" "test" { } func testAccProjectConfig_sourceReportBuildStatusGitHub(rName string, reportBuildStatus bool) string { - return acctest.ConfigCompose(testAccProjectConfig_Base_ServiceRole(rName), fmt.Sprintf(` + return acctest.ConfigCompose(testAccProjectConfig_baseServiceRole(rName), fmt.Sprintf(` resource "aws_codebuild_project" "test" { name = %[1]q service_role = aws_iam_role.test.arn @@ -4434,7 +4497,7 @@ resource "aws_codebuild_project" "test" { } func testAccProjectConfig_sourceTypeBitbucket(rName, sourceLocation string) string { - return acctest.ConfigCompose(testAccProjectConfig_Base_ServiceRole(rName), fmt.Sprintf(` + return acctest.ConfigCompose(testAccProjectConfig_baseServiceRole(rName), 
fmt.Sprintf(` resource "aws_codebuild_project" "test" { name = %[1]q service_role = aws_iam_role.test.arn @@ -4458,7 +4521,7 @@ resource "aws_codebuild_project" "test" { } func testAccProjectConfig_sourceTypeCodeCommit(rName string) string { - return acctest.ConfigCompose(testAccProjectConfig_Base_ServiceRole(rName), fmt.Sprintf(` + return acctest.ConfigCompose(testAccProjectConfig_baseServiceRole(rName), fmt.Sprintf(` resource "aws_codebuild_project" "test" { name = %[1]q service_role = aws_iam_role.test.arn @@ -4482,7 +4545,7 @@ resource "aws_codebuild_project" "test" { } func testAccProjectConfig_sourceTypeCodePipeline(rName string) string { - return acctest.ConfigCompose(testAccProjectConfig_Base_ServiceRole(rName), fmt.Sprintf(` + return acctest.ConfigCompose(testAccProjectConfig_baseServiceRole(rName), fmt.Sprintf(` resource "aws_codebuild_project" "test" { name = %[1]q service_role = aws_iam_role.test.arn @@ -4505,7 +4568,7 @@ resource "aws_codebuild_project" "test" { } func testAccProjectConfig_sourceTypeGitHubEnterprise(rName string) string { - return acctest.ConfigCompose(testAccProjectConfig_Base_ServiceRole(rName), fmt.Sprintf(` + return acctest.ConfigCompose(testAccProjectConfig_baseServiceRole(rName), fmt.Sprintf(` resource "aws_codebuild_project" "test" { name = %[1]q service_role = aws_iam_role.test.arn @@ -4529,7 +4592,7 @@ resource "aws_codebuild_project" "test" { } func testAccProjectConfig_sourceTypeS3(rName string) string { - return acctest.ConfigCompose(testAccProjectConfig_Base_ServiceRole(rName), fmt.Sprintf(` + return acctest.ConfigCompose(testAccProjectConfig_baseServiceRole(rName), fmt.Sprintf(` resource "aws_s3_bucket" "test" { bucket = %[1]q } @@ -4563,7 +4626,7 @@ resource "aws_codebuild_project" "test" { } func testAccProjectConfig_sourceTypeNoSource(rName string, rLocation string, rBuildspec string) string { - return acctest.ConfigCompose(testAccProjectConfig_Base_ServiceRole(rName), fmt.Sprintf(` + return acctest.ConfigCompose(testAccProjectConfig_baseServiceRole(rName), fmt.Sprintf(` resource "aws_codebuild_project" "test" { name = %[1]q service_role = aws_iam_role.test.arn @@ -4587,39 +4650,9 @@ resource "aws_codebuild_project" "test" { `, rName, rLocation, rBuildspec)) } -func testAccProjectConfig_tags(rName, tagKey, tagValue string) string { - return acctest.ConfigCompose(testAccProjectConfig_Base_ServiceRole(rName), fmt.Sprintf(` -resource "aws_codebuild_project" "test" { - name = %[1]q - service_role = aws_iam_role.test.arn - - artifacts { - type = "NO_ARTIFACTS" - } - - environment { - compute_type = "BUILD_GENERAL1_SMALL" - image = "2" - type = "LINUX_CONTAINER" - } - - source { - location = "https://github.com/hashicorp/packer.git" - type = "GITHUB" - } - - tags = { - tag1 = "tag1value" - - %[2]s = %[3]q - } -} -`, rName, tagKey, tagValue)) -} - func testAccProjectConfig_vpc1(rName string) string { return acctest.ConfigCompose( - testAccProjectConfig_Base_ServiceRole(rName), + testAccProjectConfig_baseServiceRole(rName), acctest.ConfigAvailableAZsNoOptIn(), fmt.Sprintf(` resource "aws_vpc" "test" { @@ -4677,7 +4710,7 @@ resource "aws_codebuild_project" "test" { func testAccProjectConfig_vpc2(rName string) string { return acctest.ConfigCompose( - testAccProjectConfig_Base_ServiceRole(rName), + testAccProjectConfig_baseServiceRole(rName), acctest.ConfigAvailableAZsNoOptIn(), fmt.Sprintf(` resource "aws_vpc" "test" { @@ -4734,7 +4767,7 @@ resource "aws_codebuild_project" "test" { } func testAccProjectConfig_windowsServer2019Container(rName string) 
string { - return acctest.ConfigCompose(testAccProjectConfig_Base_ServiceRole(rName), fmt.Sprintf(` + return acctest.ConfigCompose(testAccProjectConfig_baseServiceRole(rName), fmt.Sprintf(` resource "aws_codebuild_project" "test" { name = %[1]q service_role = aws_iam_role.test.arn @@ -4758,7 +4791,7 @@ resource "aws_codebuild_project" "test" { } func testAccProjectConfig_armContainer(rName string) string { - return acctest.ConfigCompose(testAccProjectConfig_Base_ServiceRole(rName), fmt.Sprintf(` + return acctest.ConfigCompose(testAccProjectConfig_baseServiceRole(rName), fmt.Sprintf(` resource "aws_codebuild_project" "test" { name = %[1]q service_role = aws_iam_role.test.arn @@ -4782,7 +4815,7 @@ resource "aws_codebuild_project" "test" { } func testAccProjectConfig_linuxLambdaContainer(rName string) string { - return acctest.ConfigCompose(testAccProjectConfig_Base_ServiceRole(rName), fmt.Sprintf(` + return acctest.ConfigCompose(testAccProjectConfig_baseServiceRole(rName), fmt.Sprintf(` resource "aws_codebuild_project" "test" { name = %[1]q service_role = aws_iam_role.test.arn @@ -4807,7 +4840,7 @@ resource "aws_codebuild_project" "test" { func testAccProjectConfig_artifactsArtifactIdentifier(rName string, artifactIdentifier string) string { return acctest.ConfigCompose( - testAccProjectConfig_Base_ServiceRole(rName), + testAccProjectConfig_baseServiceRole(rName), fmt.Sprintf(` resource "aws_s3_bucket" "test" { bucket = %[1]q @@ -4840,7 +4873,7 @@ resource "aws_codebuild_project" "test" { func testAccProjectConfig_artifactsEncryptionDisabled(rName string, encryptionDisabled bool) string { return acctest.ConfigCompose( - testAccProjectConfig_Base_ServiceRole(rName), + testAccProjectConfig_baseServiceRole(rName), fmt.Sprintf(` resource "aws_s3_bucket" "test" { bucket = %[1]q @@ -4873,7 +4906,7 @@ resource "aws_codebuild_project" "test" { func testAccProjectConfig_artifactsLocation(rName, bucketName string) string { return acctest.ConfigCompose( - testAccProjectConfig_Base_ServiceRole(rName), + testAccProjectConfig_baseServiceRole(rName), fmt.Sprintf(` resource "aws_s3_bucket" "test" { bucket = %[2]q @@ -4905,7 +4938,7 @@ resource "aws_codebuild_project" "test" { func testAccProjectConfig_artifactsName(rName string, name string) string { return acctest.ConfigCompose( - testAccProjectConfig_Base_ServiceRole(rName), + testAccProjectConfig_baseServiceRole(rName), fmt.Sprintf(` resource "aws_s3_bucket" "test" { bucket = %[1]q @@ -4938,7 +4971,7 @@ resource "aws_codebuild_project" "test" { func testAccProjectConfig_artifactsNamespaceType(rName, namespaceType string) string { return acctest.ConfigCompose( - testAccProjectConfig_Base_ServiceRole(rName), + testAccProjectConfig_baseServiceRole(rName), fmt.Sprintf(` resource "aws_s3_bucket" "test" { bucket = %[1]q @@ -4971,7 +5004,7 @@ resource "aws_codebuild_project" "test" { func testAccProjectConfig_artifactsOverrideArtifactName(rName string, overrideArtifactName bool) string { return acctest.ConfigCompose( - testAccProjectConfig_Base_ServiceRole(rName), + testAccProjectConfig_baseServiceRole(rName), fmt.Sprintf(` resource "aws_s3_bucket" "test" { bucket = %[1]q @@ -5004,7 +5037,7 @@ resource "aws_codebuild_project" "test" { func testAccProjectConfig_artifactsPackaging(rName, packaging string) string { return acctest.ConfigCompose( - testAccProjectConfig_Base_ServiceRole(rName), + testAccProjectConfig_baseServiceRole(rName), fmt.Sprintf(` resource "aws_s3_bucket" "test" { bucket = %[1]q @@ -5037,7 +5070,7 @@ resource "aws_codebuild_project" "test" { 
func testAccProjectConfig_artifactsPath(rName, path string) string { return acctest.ConfigCompose( - testAccProjectConfig_Base_ServiceRole(rName), + testAccProjectConfig_baseServiceRole(rName), fmt.Sprintf(` resource "aws_s3_bucket" "test" { bucket = %[1]q @@ -5070,7 +5103,7 @@ resource "aws_codebuild_project" "test" { func testAccProjectConfig_artifactsType(rName string, artifactType string) string { return acctest.ConfigCompose( - testAccProjectConfig_Base_ServiceRole(rName), + testAccProjectConfig_baseServiceRole(rName), fmt.Sprintf(` resource "aws_s3_bucket" "test" { bucket = %[1]q @@ -5102,7 +5135,7 @@ resource "aws_codebuild_project" "test" { func testAccProjectConfig_artifactsBucketOwnerAccess(rName string, typ string) string { return acctest.ConfigCompose( - testAccProjectConfig_Base_ServiceRole(rName), + testAccProjectConfig_baseServiceRole(rName), fmt.Sprintf(` resource "aws_s3_bucket" "test" { bucket = %[1]q @@ -5135,7 +5168,7 @@ resource "aws_codebuild_project" "test" { func testAccProjectConfig_secondaryArtifacts(rName string) string { return acctest.ConfigCompose( - testAccProjectConfig_Base_ServiceRole(rName), + testAccProjectConfig_baseServiceRole(rName), fmt.Sprintf(` resource "aws_s3_bucket" "test" { bucket = %[1]q @@ -5179,7 +5212,7 @@ resource "aws_codebuild_project" "test" { func testAccProjectConfig_secondaryArtifactsNone(rName string) string { return acctest.ConfigCompose( - testAccProjectConfig_Base_ServiceRole(rName), + testAccProjectConfig_baseServiceRole(rName), fmt.Sprintf(` resource "aws_s3_bucket" "test" { bucket = %[1]q @@ -5211,7 +5244,7 @@ resource "aws_codebuild_project" "test" { func testAccProjectConfig_secondaryArtifactsArtifactIdentifier(rName string, artifactIdentifier string) string { return acctest.ConfigCompose( - testAccProjectConfig_Base_ServiceRole(rName), + testAccProjectConfig_baseServiceRole(rName), fmt.Sprintf(` resource "aws_s3_bucket" "test" { bucket = %[1]q @@ -5249,7 +5282,7 @@ resource "aws_codebuild_project" "test" { func testAccProjectConfig_secondaryArtifactsEncryptionDisabled(rName string, encryptionDisabled bool) string { return acctest.ConfigCompose( - testAccProjectConfig_Base_ServiceRole(rName), + testAccProjectConfig_baseServiceRole(rName), fmt.Sprintf(` resource "aws_s3_bucket" "test" { bucket = %[1]q @@ -5288,7 +5321,7 @@ resource "aws_codebuild_project" "test" { func testAccProjectConfig_secondaryArtifactsLocation(rName, bucketName string) string { return acctest.ConfigCompose( - testAccProjectConfig_Base_ServiceRole(rName), + testAccProjectConfig_baseServiceRole(rName), fmt.Sprintf(` resource "aws_s3_bucket" "test" { bucket = %[2]q @@ -5326,7 +5359,7 @@ resource "aws_codebuild_project" "test" { func testAccProjectConfig_secondaryArtifactsName(rName string, name string) string { return acctest.ConfigCompose( - testAccProjectConfig_Base_ServiceRole(rName), + testAccProjectConfig_baseServiceRole(rName), fmt.Sprintf(` resource "aws_s3_bucket" "test" { bucket = %[1]q @@ -5365,7 +5398,7 @@ resource "aws_codebuild_project" "test" { func testAccProjectConfig_secondaryArtifactsNamespaceType(rName, namespaceType string) string { return acctest.ConfigCompose( - testAccProjectConfig_Base_ServiceRole(rName), + testAccProjectConfig_baseServiceRole(rName), fmt.Sprintf(` resource "aws_s3_bucket" "test" { bucket = %[1]q @@ -5404,7 +5437,7 @@ resource "aws_codebuild_project" "test" { func testAccProjectConfig_secondaryArtifactsOverrideArtifactName(rName string, overrideArtifactName bool) string { return acctest.ConfigCompose( - 
testAccProjectConfig_Base_ServiceRole(rName), + testAccProjectConfig_baseServiceRole(rName), fmt.Sprintf(` resource "aws_s3_bucket" "test" { bucket = %[1]q @@ -5443,7 +5476,7 @@ resource "aws_codebuild_project" "test" { func testAccProjectConfig_secondaryArtifactsPath(rName, path string) string { return acctest.ConfigCompose( - testAccProjectConfig_Base_ServiceRole(rName), + testAccProjectConfig_baseServiceRole(rName), fmt.Sprintf(` resource "aws_s3_bucket" "test" { bucket = %[1]q @@ -5482,7 +5515,7 @@ resource "aws_codebuild_project" "test" { func testAccProjectConfig_secondaryArtifactsPackaging(rName, packaging string) string { return acctest.ConfigCompose( - testAccProjectConfig_Base_ServiceRole(rName), + testAccProjectConfig_baseServiceRole(rName), fmt.Sprintf(` resource "aws_s3_bucket" "test" { bucket = %[1]q @@ -5521,7 +5554,7 @@ resource "aws_codebuild_project" "test" { func testAccProjectConfig_secondaryArtifactsType(rName string, artifactType string) string { return acctest.ConfigCompose( - testAccProjectConfig_Base_ServiceRole(rName), + testAccProjectConfig_baseServiceRole(rName), fmt.Sprintf(` resource "aws_s3_bucket" "test" { bucket = %[1]q @@ -5558,7 +5591,7 @@ resource "aws_codebuild_project" "test" { } func testAccProjectConfig_secondarySourcesCodeCommit(rName string) string { - return acctest.ConfigCompose(testAccProjectConfig_Base_ServiceRole(rName), fmt.Sprintf(` + return acctest.ConfigCompose(testAccProjectConfig_baseServiceRole(rName), fmt.Sprintf(` resource "aws_codebuild_project" "test" { name = %[1]q service_role = aws_iam_role.test.arn @@ -5594,7 +5627,7 @@ resource "aws_codebuild_project" "test" { } func testAccProjectConfig_concurrentBuildLimit(rName string, concurrentBuildLimit int) string { - return acctest.ConfigCompose(testAccProjectConfig_Base_ServiceRole(rName), fmt.Sprintf(` + return acctest.ConfigCompose(testAccProjectConfig_baseServiceRole(rName), fmt.Sprintf(` resource "aws_codebuild_project" "test" { concurrent_build_limit = %[1]d name = %[2]q diff --git a/internal/service/codebuild/webhook_test.go b/internal/service/codebuild/webhook_test.go index 75abcac7d23..6cac83b48f5 100644 --- a/internal/service/codebuild/webhook_test.go +++ b/internal/service/codebuild/webhook_test.go @@ -429,7 +429,7 @@ resource "aws_codebuild_webhook" "test" { } func testAccWebhookConfig_gitHubEnterprise(rName string, branchFilter string) string { - return acctest.ConfigCompose(testAccProjectConfig_Base_ServiceRole(rName), fmt.Sprintf(` + return acctest.ConfigCompose(testAccProjectConfig_baseServiceRole(rName), fmt.Sprintf(` resource "aws_codebuild_project" "test" { name = %[1]q service_role = aws_iam_role.test.arn From 94704bd57338daa3e960cb13bba87e6452fc4488 Mon Sep 17 00:00:00 2001 From: Kit Ewbank Date: Tue, 23 Jan 2024 11:37:55 -0500 Subject: [PATCH 22/30] r/aws_codebuild_project: Fix some 'ImportStateVerify attributes not equivalent' acceptance test errors (#35108). 
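For context, a minimal sketch of the import-verification test step used throughout project_test.go (terraform-plugin-sdk v2 helper/resource). The ImportStateVerifyIgnore field and the attribute it names are illustrative assumptions only, showing how attributes that do not round-trip through import can be skipped; they are not part of this change.

	Steps: []resource.TestStep{
		// ...create/update steps elided...
		{
			ResourceName:      resourceName,
			ImportState:       true,
			ImportStateVerify: true,
			// Illustrative only: attributes that legitimately differ after import
			// can be listed here instead of removing the verification step.
			ImportStateVerifyIgnore: []string{"example_attribute"}, // hypothetical name
		},
	},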
--- internal/service/codebuild/project_test.go | 30 ---------------------- 1 file changed, 30 deletions(-) diff --git a/internal/service/codebuild/project_test.go b/internal/service/codebuild/project_test.go index 8d9220cca75..e47f23ad933 100644 --- a/internal/service/codebuild/project_test.go +++ b/internal/service/codebuild/project_test.go @@ -1228,11 +1228,6 @@ func TestAccCodeBuildProject_SecondarySourcesGitSubmodules_codeCommit(t *testing }), ), }, - { - ResourceName: resourceName, - ImportState: true, - ImportStateVerify: true, - }, { Config: testAccProjectConfig_secondarySourcesGitSubmodulesCodeCommit(rName, false), Check: resource.ComposeTestCheckFunc( @@ -1283,11 +1278,6 @@ func TestAccCodeBuildProject_SecondarySourcesGitSubmodules_gitHub(t *testing.T) testAccCheckProjectExists(ctx, resourceName, &project), ), }, - { - ResourceName: resourceName, - ImportState: true, - ImportStateVerify: true, - }, { Config: testAccProjectConfig_secondarySourcesGitSubmodulesGitHub(rName, false), Check: resource.ComposeTestCheckFunc( @@ -1320,11 +1310,6 @@ func TestAccCodeBuildProject_SecondarySourcesGitSubmodules_gitHubEnterprise(t *t testAccCheckProjectExists(ctx, resourceName, &project), ), }, - { - ResourceName: resourceName, - ImportState: true, - ImportStateVerify: true, - }, { Config: testAccProjectConfig_secondarySourcesGitSubmodulesGitHubEnterprise(rName, false), Check: resource.ComposeTestCheckFunc( @@ -1365,11 +1350,6 @@ func TestAccCodeBuildProject_SecondarySourcesVersions(t *testing.T) { }), ), }, - { - ResourceName: resourceName, - ImportState: true, - ImportStateVerify: true, - }, { Config: testAccProjectConfig_secondarySourceVersionsCodeCommitUpdated(rName), Check: resource.ComposeTestCheckFunc( @@ -2837,11 +2817,6 @@ func TestAccCodeBuildProject_SecondaryArtifacts_type(t *testing.T) { }), ), }, - { - ResourceName: resourceName, - ImportState: true, - ImportStateVerify: true, - }, }, }) } @@ -2871,11 +2846,6 @@ func TestAccCodeBuildProject_SecondarySources_codeCommit(t *testing.T) { }), ), }, - { - ResourceName: resourceName, - ImportState: true, - ImportStateVerify: true, - }, }, }) } From 220eebfaab6bb235ee4a0dfb8e2ed8d4482d2d73 Mon Sep 17 00:00:00 2001 From: Kit Ewbank Date: Tue, 23 Jan 2024 11:38:43 -0500 Subject: [PATCH 23/30] r/aws_codebuild_project: Fix diff suppression for 'secondary_artifacts' and 'artifacts'. 
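For context, a minimal sketch of the DiffSuppressFunc pattern adjusted below (terraform-plugin-sdk v2 schema; same signature as in project.go). The attribute name and the "NONE" default here are illustrative assumptions only:

	"example_packaging": { // hypothetical attribute for illustration
		Type:     schema.TypeString,
		Optional: true,
		DiffSuppressFunc: func(k, old, new string, d *schema.ResourceData) bool {
			// Returning true tells the SDK to treat old and new as equal for
			// this key. Here an API-reported default of "NONE" is considered
			// equivalent to an omitted value; otherwise fall back to a plain
			// equality check.
			if old == "NONE" && new == "" {
				return true
			}
			return old == new
		},
	},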
--- internal/service/codebuild/project.go | 93 +++++++++++++++++++++------ 1 file changed, 73 insertions(+), 20 deletions(-) diff --git a/internal/service/codebuild/project.go b/internal/service/codebuild/project.go index d3ae32cf2dc..6d460fe011b 100644 --- a/internal/service/codebuild/project.go +++ b/internal/service/codebuild/project.go @@ -4,6 +4,7 @@ package codebuild import ( + "bytes" "context" "fmt" "log" @@ -17,6 +18,7 @@ import ( "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation" "github.com/hashicorp/terraform-provider-aws/internal/conns" + "github.com/hashicorp/terraform-provider-aws/internal/create" "github.com/hashicorp/terraform-provider-aws/internal/enum" "github.com/hashicorp/terraform-provider-aws/internal/errs/sdkdiag" "github.com/hashicorp/terraform-provider-aws/internal/flex" @@ -83,10 +85,10 @@ func resourceProject() *schema.Resource { Type: schema.TypeString, Optional: true, DiffSuppressFunc: func(k, old, new string, d *schema.ResourceData) bool { - if d.Get("artifacts.0.type") == types.ArtifactsTypeS3 { + if artifactType := types.ArtifactsType(d.Get("artifacts.0.type").(string)); artifactType == types.ArtifactsTypeS3 { return types.ArtifactNamespace(old) == types.ArtifactNamespaceNone && new == "" } - return false + return old == new }, ValidateDiagFunc: enum.Validate[types.ArtifactNamespace](), }, @@ -99,13 +101,14 @@ func resourceProject() *schema.Resource { Type: schema.TypeString, Optional: true, DiffSuppressFunc: func(k, old, new string, d *schema.ResourceData) bool { - switch d.Get("artifacts.0.type") { + switch artifactType := types.ArtifactsType(d.Get("artifacts.0.type").(string)); artifactType { case types.ArtifactsTypeCodepipeline: return new == "" case types.ArtifactsTypeS3: return types.ArtifactPackaging(old) == types.ArtifactPackagingNone && new == "" + default: + return old == new } - return false }, ValidateDiagFunc: enum.Validate[types.ArtifactPackaging](), }, @@ -181,13 +184,12 @@ func resourceProject() *schema.Resource { Default: 60, ValidateFunc: validation.IntBetween(5, 480), DiffSuppressFunc: func(k, old, new string, d *schema.ResourceData) bool { - if d.Get("environment.0.type") == types.EnvironmentTypeArmLambdaContainer { + switch environmentType := types.EnvironmentType(d.Get("environment.0.type").(string)); environmentType { + case types.EnvironmentTypeArmLambdaContainer, types.EnvironmentTypeLinuxLambdaContainer: return true + default: + return old == new } - if d.Get("environment.0.type") == types.EnvironmentTypeLinuxLambdaContainer { - return true - } - return false }, }, "cache": { @@ -430,13 +432,12 @@ func resourceProject() *schema.Resource { Default: 480, ValidateFunc: validation.IntBetween(5, 480), DiffSuppressFunc: func(k, old, new string, d *schema.ResourceData) bool { - if d.Get("environment.0.type") == types.EnvironmentTypeArmLambdaContainer { - return true - } - if d.Get("environment.0.type") == types.EnvironmentTypeLinuxLambdaContainer { + switch environmentType := types.EnvironmentType(d.Get("environment.0.type").(string)); environmentType { + case types.EnvironmentTypeArmLambdaContainer, types.EnvironmentTypeLinuxLambdaContainer: return true + default: + return old == new } - return false }, }, "resource_access_role": { @@ -448,6 +449,7 @@ func resourceProject() *schema.Resource { Type: schema.TypeSet, Optional: true, MaxItems: 12, + Set: resourceProjectArtifactsHash, Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ 
"artifact_identifier": { @@ -861,7 +863,7 @@ func resourceProjectRead(ctx context.Context, d *schema.ResourceData, meta inter d.Set("arn", project.Arn) if project.Artifacts != nil { - if err := d.Set("artifacts", []interface{}{flattenProjectArtifacts(*project.Artifacts)}); err != nil { + if err := d.Set("artifacts", []interface{}{flattenProjectArtifacts(project.Artifacts)}); err != nil { return sdkdiag.AppendErrorf(diags, "setting artifacts: %s", err) } } else { @@ -909,7 +911,7 @@ func resourceProjectRead(ctx context.Context, d *schema.ResourceData, meta inter } d.Set("service_role", project.ServiceRole) if project.Source != nil { - if err := d.Set("source", []interface{}{flattenProjectSource(*project.Source)}); err != nil { + if err := d.Set("source", []interface{}{flattenProjectSource(project.Source)}); err != nil { return sdkdiag.AppendErrorf(diags, "setting source: %s", err) } } else { @@ -1696,12 +1698,16 @@ func flattenProjectSecondaryArtifacts(apiObjects []types.ProjectArtifacts) []int tfList := []interface{}{} for _, apiObject := range apiObjects { - tfList = append(tfList, flattenProjectArtifacts(apiObject)) + tfList = append(tfList, flattenProjectArtifacts(&apiObject)) } return tfList } -func flattenProjectArtifacts(apiObject types.ProjectArtifacts) map[string]interface{} { +func flattenProjectArtifacts(apiObject *types.ProjectArtifacts) map[string]interface{} { + if apiObject == nil { + return nil + } + tfMap := map[string]interface{}{ "bucket_owner_access": apiObject.BucketOwnerAccess, "namespace_type": apiObject.NamespaceType, @@ -1736,6 +1742,49 @@ func flattenProjectArtifacts(apiObject types.ProjectArtifacts) map[string]interf return tfMap } +func resourceProjectArtifactsHash(v interface{}) int { + var buf bytes.Buffer + tfMap := v.(map[string]interface{}) + + if v, ok := tfMap["artifact_identifier"]; ok { + buf.WriteString(fmt.Sprintf("%s-", v.(string))) + } + + if v, ok := tfMap["bucket_owner_access"]; ok { + buf.WriteString(fmt.Sprintf("%s-", v.(string))) + } + + if v, ok := tfMap["encryption_disabled"]; ok { + buf.WriteString(fmt.Sprintf("%t-", v.(bool))) + } + + if v, ok := tfMap["location"]; ok { + buf.WriteString(fmt.Sprintf("%s-", v.(string))) + } + + if v, ok := tfMap["namespace_type"]; ok { + buf.WriteString(fmt.Sprintf("%s-", v.(string))) + } + + if v, ok := tfMap["override_artifact_name"]; ok { + buf.WriteString(fmt.Sprintf("%t-", v.(bool))) + } + + if v, ok := tfMap["packaging"]; ok { + buf.WriteString(fmt.Sprintf("%s-", v.(string))) + } + + if v, ok := tfMap["path"]; ok { + buf.WriteString(fmt.Sprintf("%s-", v.(string))) + } + + if v, ok := tfMap["type"]; ok { + buf.WriteString(fmt.Sprintf("%s-", v.(string))) + } + + return create.StringHashcode(buf.String()) +} + func flattenProjectCache(apiObject *types.ProjectCache) []interface{} { if apiObject == nil { return []interface{}{} @@ -1786,13 +1835,17 @@ func flattenProjectSecondarySources(apiObject []types.ProjectSource) []interface tfList := make([]interface{}, 0) for _, apiObject := range apiObject { - tfList = append(tfList, flattenProjectSource(apiObject)) + tfList = append(tfList, flattenProjectSource(&apiObject)) } return tfList } -func flattenProjectSource(apiObject types.ProjectSource) interface{} { +func flattenProjectSource(apiObject *types.ProjectSource) map[string]interface{} { + if apiObject == nil { + return nil + } + tfMap := map[string]interface{}{ "buildspec": aws.ToString(apiObject.Buildspec), "location": aws.ToString(apiObject.Location), From 2dc8da95d622522200df06a026c239108e18da85 Mon Sep 
17 00:00:00 2001 From: Kit Ewbank Date: Tue, 23 Jan 2024 11:44:13 -0500 Subject: [PATCH 24/30] Acceptance test output: % make testacc TESTARGS='-run=TestAccCodeBuildProject_' PKG=codebuild ACCTEST_PARALLELISM=2 ==> Checking that code complies with gofmt requirements... TF_ACC=1 go test ./internal/service/codebuild/... -v -count 1 -parallel 2 -run=TestAccCodeBuildProject_ -timeout 360m === RUN TestAccCodeBuildProject_basic === PAUSE TestAccCodeBuildProject_basic === RUN TestAccCodeBuildProject_disappears === PAUSE TestAccCodeBuildProject_disappears === RUN TestAccCodeBuildProject_tags === PAUSE TestAccCodeBuildProject_tags === RUN TestAccCodeBuildProject_publicVisibility === PAUSE TestAccCodeBuildProject_publicVisibility === RUN TestAccCodeBuildProject_badgeEnabled === PAUSE TestAccCodeBuildProject_badgeEnabled === RUN TestAccCodeBuildProject_buildTimeout === PAUSE TestAccCodeBuildProject_buildTimeout === RUN TestAccCodeBuildProject_queuedTimeout === PAUSE TestAccCodeBuildProject_queuedTimeout === RUN TestAccCodeBuildProject_cache === PAUSE TestAccCodeBuildProject_cache === RUN TestAccCodeBuildProject_description === PAUSE TestAccCodeBuildProject_description === RUN TestAccCodeBuildProject_fileSystemLocations === PAUSE TestAccCodeBuildProject_fileSystemLocations === RUN TestAccCodeBuildProject_sourceVersion === PAUSE TestAccCodeBuildProject_sourceVersion === RUN TestAccCodeBuildProject_encryptionKey === PAUSE TestAccCodeBuildProject_encryptionKey === RUN TestAccCodeBuildProject_Environment_environmentVariable === PAUSE TestAccCodeBuildProject_Environment_environmentVariable === RUN TestAccCodeBuildProject_EnvironmentEnvironmentVariable_type === PAUSE TestAccCodeBuildProject_EnvironmentEnvironmentVariable_type === RUN TestAccCodeBuildProject_EnvironmentEnvironmentVariable_value === PAUSE TestAccCodeBuildProject_EnvironmentEnvironmentVariable_value === RUN TestAccCodeBuildProject_Environment_certificate === PAUSE TestAccCodeBuildProject_Environment_certificate === RUN TestAccCodeBuildProject_Environment_registryCredential === PAUSE TestAccCodeBuildProject_Environment_registryCredential === RUN TestAccCodeBuildProject_Logs_cloudWatchLogs === PAUSE TestAccCodeBuildProject_Logs_cloudWatchLogs === RUN TestAccCodeBuildProject_Logs_s3Logs === PAUSE TestAccCodeBuildProject_Logs_s3Logs === RUN TestAccCodeBuildProject_buildBatch === PAUSE TestAccCodeBuildProject_buildBatch === RUN TestAccCodeBuildProject_buildBatchConfigDelete === PAUSE TestAccCodeBuildProject_buildBatchConfigDelete === RUN TestAccCodeBuildProject_Source_gitCloneDepth === PAUSE TestAccCodeBuildProject_Source_gitCloneDepth === RUN TestAccCodeBuildProject_SourceGitSubmodules_codeCommit === PAUSE TestAccCodeBuildProject_SourceGitSubmodules_codeCommit === RUN TestAccCodeBuildProject_SourceGitSubmodules_gitHub === PAUSE TestAccCodeBuildProject_SourceGitSubmodules_gitHub === RUN TestAccCodeBuildProject_SourceGitSubmodules_gitHubEnterprise === PAUSE TestAccCodeBuildProject_SourceGitSubmodules_gitHubEnterprise === RUN TestAccCodeBuildProject_SecondarySourcesGitSubmodules_codeCommit === PAUSE TestAccCodeBuildProject_SecondarySourcesGitSubmodules_codeCommit === RUN TestAccCodeBuildProject_SecondarySourcesGitSubmodules_gitHub === PAUSE TestAccCodeBuildProject_SecondarySourcesGitSubmodules_gitHub === RUN TestAccCodeBuildProject_SecondarySourcesGitSubmodules_gitHubEnterprise === PAUSE TestAccCodeBuildProject_SecondarySourcesGitSubmodules_gitHubEnterprise === RUN TestAccCodeBuildProject_SecondarySourcesVersions === PAUSE 
TestAccCodeBuildProject_SecondarySourcesVersions === RUN TestAccCodeBuildProject_Source_insecureSSL === PAUSE TestAccCodeBuildProject_Source_insecureSSL === RUN TestAccCodeBuildProject_SourceBuildStatus_gitHubEnterprise === PAUSE TestAccCodeBuildProject_SourceBuildStatus_gitHubEnterprise === RUN TestAccCodeBuildProject_SourceReportBuildStatus_gitHubEnterprise === PAUSE TestAccCodeBuildProject_SourceReportBuildStatus_gitHubEnterprise === RUN TestAccCodeBuildProject_SourceReportBuildStatus_bitbucket === PAUSE TestAccCodeBuildProject_SourceReportBuildStatus_bitbucket === RUN TestAccCodeBuildProject_SourceReportBuildStatus_gitHub === PAUSE TestAccCodeBuildProject_SourceReportBuildStatus_gitHub === RUN TestAccCodeBuildProject_SourceType_bitbucket === PAUSE TestAccCodeBuildProject_SourceType_bitbucket === RUN TestAccCodeBuildProject_SourceType_codeCommit === PAUSE TestAccCodeBuildProject_SourceType_codeCommit === RUN TestAccCodeBuildProject_SourceType_codePipeline === PAUSE TestAccCodeBuildProject_SourceType_codePipeline === RUN TestAccCodeBuildProject_SourceType_gitHubEnterprise === PAUSE TestAccCodeBuildProject_SourceType_gitHubEnterprise === RUN TestAccCodeBuildProject_SourceType_s3 === PAUSE TestAccCodeBuildProject_SourceType_s3 === RUN TestAccCodeBuildProject_SourceType_noSource === PAUSE TestAccCodeBuildProject_SourceType_noSource === RUN TestAccCodeBuildProject_SourceType_noSourceInvalid === PAUSE TestAccCodeBuildProject_SourceType_noSourceInvalid === RUN TestAccCodeBuildProject_vpc === PAUSE TestAccCodeBuildProject_vpc === RUN TestAccCodeBuildProject_windowsServer2019Container === PAUSE TestAccCodeBuildProject_windowsServer2019Container === RUN TestAccCodeBuildProject_armContainer === PAUSE TestAccCodeBuildProject_armContainer === RUN TestAccCodeBuildProject_linuxLambdaContainer === PAUSE TestAccCodeBuildProject_linuxLambdaContainer === RUN TestAccCodeBuildProject_Artifacts_artifactIdentifier === PAUSE TestAccCodeBuildProject_Artifacts_artifactIdentifier === RUN TestAccCodeBuildProject_Artifacts_encryptionDisabled === PAUSE TestAccCodeBuildProject_Artifacts_encryptionDisabled === RUN TestAccCodeBuildProject_Artifacts_location === PAUSE TestAccCodeBuildProject_Artifacts_location === RUN TestAccCodeBuildProject_Artifacts_name === PAUSE TestAccCodeBuildProject_Artifacts_name === RUN TestAccCodeBuildProject_Artifacts_namespaceType === PAUSE TestAccCodeBuildProject_Artifacts_namespaceType === RUN TestAccCodeBuildProject_Artifacts_overrideArtifactName === PAUSE TestAccCodeBuildProject_Artifacts_overrideArtifactName === RUN TestAccCodeBuildProject_Artifacts_packaging === PAUSE TestAccCodeBuildProject_Artifacts_packaging === RUN TestAccCodeBuildProject_Artifacts_path === PAUSE TestAccCodeBuildProject_Artifacts_path === RUN TestAccCodeBuildProject_Artifacts_type === PAUSE TestAccCodeBuildProject_Artifacts_type === RUN TestAccCodeBuildProject_Artifacts_bucketOwnerAccess === PAUSE TestAccCodeBuildProject_Artifacts_bucketOwnerAccess === RUN TestAccCodeBuildProject_secondaryArtifacts === PAUSE TestAccCodeBuildProject_secondaryArtifacts === RUN TestAccCodeBuildProject_SecondaryArtifacts_artifactIdentifier === PAUSE TestAccCodeBuildProject_SecondaryArtifacts_artifactIdentifier === RUN TestAccCodeBuildProject_SecondaryArtifacts_overrideArtifactName === PAUSE TestAccCodeBuildProject_SecondaryArtifacts_overrideArtifactName === RUN TestAccCodeBuildProject_SecondaryArtifacts_encryptionDisabled === PAUSE TestAccCodeBuildProject_SecondaryArtifacts_encryptionDisabled === RUN 
TestAccCodeBuildProject_SecondaryArtifacts_location === PAUSE TestAccCodeBuildProject_SecondaryArtifacts_location === RUN TestAccCodeBuildProject_SecondaryArtifacts_name acctest.go:93: Currently no solution to allow updates on name attribute --- SKIP: TestAccCodeBuildProject_SecondaryArtifacts_name (0.00s) === RUN TestAccCodeBuildProject_SecondaryArtifacts_namespaceType === PAUSE TestAccCodeBuildProject_SecondaryArtifacts_namespaceType === RUN TestAccCodeBuildProject_SecondaryArtifacts_path === PAUSE TestAccCodeBuildProject_SecondaryArtifacts_path === RUN TestAccCodeBuildProject_SecondaryArtifacts_packaging === PAUSE TestAccCodeBuildProject_SecondaryArtifacts_packaging === RUN TestAccCodeBuildProject_SecondaryArtifacts_type === PAUSE TestAccCodeBuildProject_SecondaryArtifacts_type === RUN TestAccCodeBuildProject_SecondarySources_codeCommit === PAUSE TestAccCodeBuildProject_SecondarySources_codeCommit === RUN TestAccCodeBuildProject_concurrentBuildLimit === PAUSE TestAccCodeBuildProject_concurrentBuildLimit === CONT TestAccCodeBuildProject_basic === CONT TestAccCodeBuildProject_SecondarySources_codeCommit === NAME TestAccCodeBuildProject_basic source_credential_test.go:144: skipping acceptance testing: Source Credentials (GITHUB) not found --- SKIP: TestAccCodeBuildProject_basic (0.86s) === CONT TestAccCodeBuildProject_SourceReportBuildStatus_bitbucket source_credential_test.go:144: skipping acceptance testing: Source Credentials (BITBUCKET) not found --- SKIP: TestAccCodeBuildProject_SourceReportBuildStatus_bitbucket (0.18s) === CONT TestAccCodeBuildProject_SecondaryArtifacts_type --- PASS: TestAccCodeBuildProject_SecondaryArtifacts_type (25.55s) === CONT TestAccCodeBuildProject_SecondaryArtifacts_packaging source_credential_test.go:144: skipping acceptance testing: Source Credentials (GITHUB) not found --- SKIP: TestAccCodeBuildProject_SecondaryArtifacts_packaging (0.18s) === CONT TestAccCodeBuildProject_SecondaryArtifacts_path --- PASS: TestAccCodeBuildProject_SecondarySources_codeCommit (26.90s) === CONT TestAccCodeBuildProject_SecondaryArtifacts_namespaceType === NAME TestAccCodeBuildProject_SecondaryArtifacts_path source_credential_test.go:144: skipping acceptance testing: Source Credentials (GITHUB) not found --- SKIP: TestAccCodeBuildProject_SecondaryArtifacts_path (0.18s) === CONT TestAccCodeBuildProject_SecondaryArtifacts_location === NAME TestAccCodeBuildProject_SecondaryArtifacts_namespaceType source_credential_test.go:144: skipping acceptance testing: Source Credentials (GITHUB) not found --- SKIP: TestAccCodeBuildProject_SecondaryArtifacts_namespaceType (0.19s) === CONT TestAccCodeBuildProject_SecondaryArtifacts_encryptionDisabled === NAME TestAccCodeBuildProject_SecondaryArtifacts_location source_credential_test.go:144: skipping acceptance testing: Source Credentials (GITHUB) not found --- SKIP: TestAccCodeBuildProject_SecondaryArtifacts_location (0.20s) === CONT TestAccCodeBuildProject_SecondaryArtifacts_overrideArtifactName === NAME TestAccCodeBuildProject_SecondaryArtifacts_encryptionDisabled source_credential_test.go:144: skipping acceptance testing: Source Credentials (GITHUB) not found --- SKIP: TestAccCodeBuildProject_SecondaryArtifacts_encryptionDisabled (0.19s) === CONT TestAccCodeBuildProject_SecondaryArtifacts_artifactIdentifier === NAME TestAccCodeBuildProject_SecondaryArtifacts_overrideArtifactName source_credential_test.go:144: skipping acceptance testing: Source Credentials (GITHUB) not found --- SKIP: 
TestAccCodeBuildProject_SecondaryArtifacts_overrideArtifactName (0.18s) === CONT TestAccCodeBuildProject_secondaryArtifacts === NAME TestAccCodeBuildProject_SecondaryArtifacts_artifactIdentifier source_credential_test.go:144: skipping acceptance testing: Source Credentials (GITHUB) not found --- SKIP: TestAccCodeBuildProject_SecondaryArtifacts_artifactIdentifier (0.19s) === CONT TestAccCodeBuildProject_Artifacts_bucketOwnerAccess === NAME TestAccCodeBuildProject_secondaryArtifacts source_credential_test.go:144: skipping acceptance testing: Source Credentials (GITHUB) not found --- SKIP: TestAccCodeBuildProject_secondaryArtifacts (0.18s) === CONT TestAccCodeBuildProject_Artifacts_type --- PASS: TestAccCodeBuildProject_Artifacts_type (45.61s) === CONT TestAccCodeBuildProject_Artifacts_path --- PASS: TestAccCodeBuildProject_Artifacts_bucketOwnerAccess (45.65s) === CONT TestAccCodeBuildProject_Artifacts_packaging === NAME TestAccCodeBuildProject_Artifacts_path source_credential_test.go:144: skipping acceptance testing: Source Credentials (GITHUB) not found --- SKIP: TestAccCodeBuildProject_Artifacts_path (0.19s) === CONT TestAccCodeBuildProject_Artifacts_overrideArtifactName === NAME TestAccCodeBuildProject_Artifacts_packaging source_credential_test.go:144: skipping acceptance testing: Source Credentials (GITHUB) not found --- SKIP: TestAccCodeBuildProject_Artifacts_packaging (0.18s) === CONT TestAccCodeBuildProject_Artifacts_namespaceType === NAME TestAccCodeBuildProject_Artifacts_overrideArtifactName source_credential_test.go:144: skipping acceptance testing: Source Credentials (GITHUB) not found --- SKIP: TestAccCodeBuildProject_Artifacts_overrideArtifactName (0.19s) === CONT TestAccCodeBuildProject_Artifacts_name === NAME TestAccCodeBuildProject_Artifacts_namespaceType source_credential_test.go:144: skipping acceptance testing: Source Credentials (GITHUB) not found --- SKIP: TestAccCodeBuildProject_Artifacts_namespaceType (0.19s) === CONT TestAccCodeBuildProject_Artifacts_location === NAME TestAccCodeBuildProject_Artifacts_name source_credential_test.go:144: skipping acceptance testing: Source Credentials (GITHUB) not found --- SKIP: TestAccCodeBuildProject_Artifacts_name (0.18s) === CONT TestAccCodeBuildProject_Artifacts_encryptionDisabled === NAME TestAccCodeBuildProject_Artifacts_location source_credential_test.go:144: skipping acceptance testing: Source Credentials (GITHUB) not found --- SKIP: TestAccCodeBuildProject_Artifacts_location (0.18s) === CONT TestAccCodeBuildProject_Artifacts_artifactIdentifier === NAME TestAccCodeBuildProject_Artifacts_encryptionDisabled source_credential_test.go:144: skipping acceptance testing: Source Credentials (GITHUB) not found --- SKIP: TestAccCodeBuildProject_Artifacts_encryptionDisabled (0.19s) === CONT TestAccCodeBuildProject_linuxLambdaContainer === NAME TestAccCodeBuildProject_Artifacts_artifactIdentifier source_credential_test.go:144: skipping acceptance testing: Source Credentials (GITHUB) not found --- SKIP: TestAccCodeBuildProject_Artifacts_artifactIdentifier (0.19s) === CONT TestAccCodeBuildProject_armContainer === NAME TestAccCodeBuildProject_linuxLambdaContainer source_credential_test.go:144: skipping acceptance testing: Source Credentials (GITHUB) not found --- SKIP: TestAccCodeBuildProject_linuxLambdaContainer (0.19s) === CONT TestAccCodeBuildProject_windowsServer2019Container === NAME TestAccCodeBuildProject_armContainer source_credential_test.go:144: skipping acceptance testing: Source Credentials (GITHUB) not found --- SKIP: 
TestAccCodeBuildProject_armContainer (0.19s) === CONT TestAccCodeBuildProject_vpc === NAME TestAccCodeBuildProject_windowsServer2019Container source_credential_test.go:144: skipping acceptance testing: Source Credentials (GITHUB) not found --- SKIP: TestAccCodeBuildProject_windowsServer2019Container (0.19s) === CONT TestAccCodeBuildProject_SourceType_noSourceInvalid === NAME TestAccCodeBuildProject_vpc source_credential_test.go:144: skipping acceptance testing: Source Credentials (GITHUB) not found --- SKIP: TestAccCodeBuildProject_vpc (0.19s) === CONT TestAccCodeBuildProject_SourceType_noSource --- PASS: TestAccCodeBuildProject_SourceType_noSourceInvalid (13.34s) === CONT TestAccCodeBuildProject_SourceType_s3 --- PASS: TestAccCodeBuildProject_SourceType_noSource (29.19s) === CONT TestAccCodeBuildProject_SourceType_gitHubEnterprise source_credential_test.go:144: skipping acceptance testing: Source Credentials (GITHUB_ENTERPRISE) not found --- SKIP: TestAccCodeBuildProject_SourceType_gitHubEnterprise (0.19s) === CONT TestAccCodeBuildProject_SourceType_codePipeline --- PASS: TestAccCodeBuildProject_SourceType_s3 (29.29s) === CONT TestAccCodeBuildProject_SourceType_codeCommit --- PASS: TestAccCodeBuildProject_SourceType_codePipeline (29.79s) === CONT TestAccCodeBuildProject_SourceType_bitbucket source_credential_test.go:144: skipping acceptance testing: Source Credentials (BITBUCKET) not found --- SKIP: TestAccCodeBuildProject_SourceType_bitbucket (0.18s) === CONT TestAccCodeBuildProject_SourceReportBuildStatus_gitHub source_credential_test.go:144: skipping acceptance testing: Source Credentials (GITHUB) not found --- SKIP: TestAccCodeBuildProject_SourceReportBuildStatus_gitHub (0.18s) === CONT TestAccCodeBuildProject_Environment_registryCredential source_credential_test.go:144: skipping acceptance testing: Source Credentials (GITHUB) not found --- SKIP: TestAccCodeBuildProject_Environment_registryCredential (0.20s) === CONT TestAccCodeBuildProject_SourceReportBuildStatus_gitHubEnterprise source_credential_test.go:144: skipping acceptance testing: Source Credentials (GITHUB_ENTERPRISE) not found --- SKIP: TestAccCodeBuildProject_SourceReportBuildStatus_gitHubEnterprise (0.17s) === CONT TestAccCodeBuildProject_SourceBuildStatus_gitHubEnterprise source_credential_test.go:144: skipping acceptance testing: Source Credentials (GITHUB_ENTERPRISE) not found --- SKIP: TestAccCodeBuildProject_SourceBuildStatus_gitHubEnterprise (0.18s) === CONT TestAccCodeBuildProject_Source_insecureSSL source_credential_test.go:144: skipping acceptance testing: Source Credentials (GITHUB) not found --- SKIP: TestAccCodeBuildProject_Source_insecureSSL (0.18s) === CONT TestAccCodeBuildProject_concurrentBuildLimit source_credential_test.go:144: skipping acceptance testing: Source Credentials (GITHUB) not found --- SKIP: TestAccCodeBuildProject_concurrentBuildLimit (0.18s) === CONT TestAccCodeBuildProject_description source_credential_test.go:144: skipping acceptance testing: Source Credentials (GITHUB) not found --- SKIP: TestAccCodeBuildProject_description (0.20s) === CONT TestAccCodeBuildProject_Environment_certificate source_credential_test.go:144: skipping acceptance testing: Source Credentials (GITHUB) not found --- SKIP: TestAccCodeBuildProject_Environment_certificate (0.19s) === CONT TestAccCodeBuildProject_EnvironmentEnvironmentVariable_value source_credential_test.go:144: skipping acceptance testing: Source Credentials (GITHUB) not found --- SKIP: TestAccCodeBuildProject_EnvironmentEnvironmentVariable_value 
(0.19s) === CONT TestAccCodeBuildProject_EnvironmentEnvironmentVariable_type source_credential_test.go:144: skipping acceptance testing: Source Credentials (GITHUB) not found --- SKIP: TestAccCodeBuildProject_EnvironmentEnvironmentVariable_type (0.19s) === CONT TestAccCodeBuildProject_Environment_environmentVariable source_credential_test.go:144: skipping acceptance testing: Source Credentials (GITHUB) not found --- SKIP: TestAccCodeBuildProject_Environment_environmentVariable (0.18s) === CONT TestAccCodeBuildProject_encryptionKey source_credential_test.go:144: skipping acceptance testing: Source Credentials (GITHUB) not found --- SKIP: TestAccCodeBuildProject_encryptionKey (0.18s) === CONT TestAccCodeBuildProject_sourceVersion source_credential_test.go:144: skipping acceptance testing: Source Credentials (GITHUB) not found --- SKIP: TestAccCodeBuildProject_sourceVersion (0.18s) === CONT TestAccCodeBuildProject_fileSystemLocations source_credential_test.go:144: skipping acceptance testing: Source Credentials (GITHUB) not found --- SKIP: TestAccCodeBuildProject_fileSystemLocations (0.18s) === CONT TestAccCodeBuildProject_badgeEnabled source_credential_test.go:144: skipping acceptance testing: Source Credentials (GITHUB) not found --- SKIP: TestAccCodeBuildProject_badgeEnabled (0.18s) === CONT TestAccCodeBuildProject_SecondarySourcesVersions --- PASS: TestAccCodeBuildProject_SourceType_codeCommit (29.71s) === CONT TestAccCodeBuildProject_cache source_credential_test.go:144: skipping acceptance testing: Source Credentials (GITHUB) not found --- SKIP: TestAccCodeBuildProject_cache (0.18s) === CONT TestAccCodeBuildProject_SecondarySourcesGitSubmodules_gitHubEnterprise source_credential_test.go:144: skipping acceptance testing: Source Credentials (GITHUB_ENTERPRISE) not found --- SKIP: TestAccCodeBuildProject_SecondarySourcesGitSubmodules_gitHubEnterprise (0.19s) === CONT TestAccCodeBuildProject_queuedTimeout source_credential_test.go:144: skipping acceptance testing: Source Credentials (GITHUB) not found --- SKIP: TestAccCodeBuildProject_queuedTimeout (0.18s) === CONT TestAccCodeBuildProject_buildTimeout source_credential_test.go:144: skipping acceptance testing: Source Credentials (GITHUB) not found --- SKIP: TestAccCodeBuildProject_buildTimeout (0.18s) === CONT TestAccCodeBuildProject_tags source_credential_test.go:144: skipping acceptance testing: Source Credentials (GITHUB) not found --- SKIP: TestAccCodeBuildProject_tags (0.18s) === CONT TestAccCodeBuildProject_SecondarySourcesGitSubmodules_gitHub source_credential_test.go:144: skipping acceptance testing: Source Credentials (GITHUB) not found --- SKIP: TestAccCodeBuildProject_SecondarySourcesGitSubmodules_gitHub (0.19s) === CONT TestAccCodeBuildProject_SecondarySourcesGitSubmodules_codeCommit --- PASS: TestAccCodeBuildProject_SecondarySourcesGitSubmodules_codeCommit (52.04s) === CONT TestAccCodeBuildProject_SourceGitSubmodules_gitHubEnterprise source_credential_test.go:144: skipping acceptance testing: Source Credentials (GITHUB_ENTERPRISE) not found --- SKIP: TestAccCodeBuildProject_SourceGitSubmodules_gitHubEnterprise (0.19s) === CONT TestAccCodeBuildProject_SourceGitSubmodules_gitHub source_credential_test.go:144: skipping acceptance testing: Source Credentials (GITHUB) not found --- SKIP: TestAccCodeBuildProject_SourceGitSubmodules_gitHub (0.18s) === CONT TestAccCodeBuildProject_SourceGitSubmodules_codeCommit --- PASS: TestAccCodeBuildProject_SecondarySourcesVersions (64.28s) === CONT TestAccCodeBuildProject_publicVisibility 
source_credential_test.go:144: skipping acceptance testing: Source Credentials (GITHUB) not found --- SKIP: TestAccCodeBuildProject_publicVisibility (0.17s) === CONT TestAccCodeBuildProject_Source_gitCloneDepth source_credential_test.go:144: skipping acceptance testing: Source Credentials (GITHUB) not found --- SKIP: TestAccCodeBuildProject_Source_gitCloneDepth (0.43s) === CONT TestAccCodeBuildProject_buildBatchConfigDelete source_credential_test.go:144: skipping acceptance testing: Source Credentials (GITHUB) not found --- SKIP: TestAccCodeBuildProject_buildBatchConfigDelete (0.18s) === CONT TestAccCodeBuildProject_Logs_s3Logs source_credential_test.go:144: skipping acceptance testing: Source Credentials (GITHUB) not found --- SKIP: TestAccCodeBuildProject_Logs_s3Logs (0.18s) === CONT TestAccCodeBuildProject_Logs_cloudWatchLogs source_credential_test.go:144: skipping acceptance testing: Source Credentials (GITHUB) not found --- SKIP: TestAccCodeBuildProject_Logs_cloudWatchLogs (0.17s) === CONT TestAccCodeBuildProject_buildBatch source_credential_test.go:144: skipping acceptance testing: Source Credentials (GITHUB) not found --- SKIP: TestAccCodeBuildProject_buildBatch (0.18s) === CONT TestAccCodeBuildProject_disappears source_credential_test.go:144: skipping acceptance testing: Source Credentials (GITHUB) not found --- SKIP: TestAccCodeBuildProject_disappears (0.18s) --- PASS: TestAccCodeBuildProject_SourceGitSubmodules_codeCommit (39.93s) PASS ok github.com/hashicorp/terraform-provider-aws/internal/service/codebuild 250.741s From 5b3a4461c75b8ac13439da34380860f1ebdc3090 Mon Sep 17 00:00:00 2001 From: Kit Ewbank Date: Tue, 23 Jan 2024 11:50:19 -0500 Subject: [PATCH 25/30] r/aws_codebuild_project: Tidy up acceptance tests. --- internal/service/codebuild/project_test.go | 75 ++++++---------------- 1 file changed, 21 insertions(+), 54 deletions(-) diff --git a/internal/service/codebuild/project_test.go b/internal/service/codebuild/project_test.go index e47f23ad933..a50e8d3c97c 100644 --- a/internal/service/codebuild/project_test.go +++ b/internal/service/codebuild/project_test.go @@ -3407,7 +3407,7 @@ resource "aws_subnet" "public" { vpc_id = aws_vpc.test.id tags = { - Name = "%[1]s-public" + Name = %[1]q } } @@ -3415,7 +3415,7 @@ resource "aws_route_table" "public" { vpc_id = aws_vpc.test.id tags = { - Name = "%[1]s-public" + Name = %[1]q } } @@ -3436,7 +3436,7 @@ resource "aws_subnet" "private" { vpc_id = aws_vpc.test.id tags = { - Name = "%[1]s-private" + Name = %[1]q } } @@ -3463,7 +3463,7 @@ resource "aws_route_table" "private" { vpc_id = aws_vpc.test.id tags = { - Name = "%[1]s-private" + Name = %[1]q } } @@ -3481,6 +3481,10 @@ resource "aws_route" "private" { resource "aws_security_group" "test" { name = %[1]q vpc_id = aws_vpc.test.id + + tags = { + Name = %[1]q + } } resource "aws_codebuild_project" "test" { @@ -4620,36 +4624,24 @@ resource "aws_codebuild_project" "test" { `, rName, rLocation, rBuildspec)) } -func testAccProjectConfig_vpc1(rName string) string { - return acctest.ConfigCompose( - testAccProjectConfig_baseServiceRole(rName), - acctest.ConfigAvailableAZsNoOptIn(), - fmt.Sprintf(` -resource "aws_vpc" "test" { - cidr_block = "10.0.0.0/16" - - tags = { - Name = %[1]q - } -} - -resource "aws_subnet" "test" { - count = 1 - - availability_zone = data.aws_availability_zones.available.names[count.index] - cidr_block = "10.0.0.0/24" - vpc_id = aws_vpc.test.id +func testAccProjectConfig_baseVPC(rName string) string { + return 
acctest.ConfigCompose(acctest.ConfigVPCWithSubnets(rName, 2), fmt.Sprintf(` +resource "aws_security_group" "test" { + name = %[1]q + vpc_id = aws_vpc.test.id tags = { Name = %[1]q } } - -resource "aws_security_group" "test" { - name = %[1]q - vpc_id = aws_vpc.test.id +`, rName)) } +func testAccProjectConfig_vpc1(rName string) string { + return acctest.ConfigCompose( + testAccProjectConfig_baseServiceRole(rName), + testAccProjectConfig_baseVPC(rName), + fmt.Sprintf(` resource "aws_codebuild_project" "test" { name = %[1]q service_role = aws_iam_role.test.arn @@ -4671,7 +4663,7 @@ resource "aws_codebuild_project" "test" { vpc_config { security_group_ids = [aws_security_group.test.id] - subnets = aws_subnet.test[*].id + subnets = aws_subnet.test[0].id vpc_id = aws_vpc.test.id } } @@ -4681,33 +4673,8 @@ resource "aws_codebuild_project" "test" { func testAccProjectConfig_vpc2(rName string) string { return acctest.ConfigCompose( testAccProjectConfig_baseServiceRole(rName), - acctest.ConfigAvailableAZsNoOptIn(), + testAccProjectConfig_baseVPC(rName), fmt.Sprintf(` -resource "aws_vpc" "test" { - cidr_block = "10.0.0.0/16" - - tags = { - Name = %[1]q - } -} - -resource "aws_subnet" "test" { - count = 2 - - availability_zone = data.aws_availability_zones.available.names[count.index] - cidr_block = "10.0.${count.index}.0/24" - vpc_id = aws_vpc.test.id - - tags = { - Name = %[1]q - } -} - -resource "aws_security_group" "test" { - name = %[1]q - vpc_id = aws_vpc.test.id -} - resource "aws_codebuild_project" "test" { name = %[1]q service_role = aws_iam_role.test.arn From 7c5d016dd7dfc1f62499fb4f5c928cb4bdd2c342 Mon Sep 17 00:00:00 2001 From: Kit Ewbank Date: Tue, 23 Jan 2024 12:18:41 -0500 Subject: [PATCH 26/30] r/aws_codebuild_project: Fix some more 'ImportStateVerify attributes not equivalent' acceptance test errors (#35108). --- internal/service/codebuild/project_test.go | 45 ---------------------- 1 file changed, 45 deletions(-) diff --git a/internal/service/codebuild/project_test.go b/internal/service/codebuild/project_test.go index a50e8d3c97c..709739ec414 100644 --- a/internal/service/codebuild/project_test.go +++ b/internal/service/codebuild/project_test.go @@ -2406,11 +2406,6 @@ func TestAccCodeBuildProject_secondaryArtifacts(t *testing.T) { resource.TestCheckResourceAttr(resourceName, "secondary_artifacts.#", "2"), ), }, - { - ResourceName: resourceName, - ImportState: true, - ImportStateVerify: true, - }, { Config: testAccProjectConfig_secondaryArtifactsNone(rName), Check: resource.ComposeTestCheckFunc( @@ -2451,11 +2446,6 @@ func TestAccCodeBuildProject_SecondaryArtifacts_artifactIdentifier(t *testing.T) }), ), }, - { - ResourceName: resourceName, - ImportState: true, - ImportStateVerify: true, - }, { Config: testAccProjectConfig_secondaryArtifactsArtifactIdentifier(rName, artifactIdentifier2), Check: resource.ComposeTestCheckFunc( @@ -2496,11 +2486,6 @@ func TestAccCodeBuildProject_SecondaryArtifacts_overrideArtifactName(t *testing. 
}), ), }, - { - ResourceName: resourceName, - ImportState: true, - ImportStateVerify: true, - }, { Config: testAccProjectConfig_secondaryArtifactsOverrideArtifactName(rName, false), Check: resource.ComposeTestCheckFunc( @@ -2541,11 +2526,6 @@ func TestAccCodeBuildProject_SecondaryArtifacts_encryptionDisabled(t *testing.T) }), ), }, - { - ResourceName: resourceName, - ImportState: true, - ImportStateVerify: true, - }, { Config: testAccProjectConfig_secondaryArtifactsEncryptionDisabled(rName, false), Check: resource.ComposeTestCheckFunc( @@ -2587,11 +2567,6 @@ func TestAccCodeBuildProject_SecondaryArtifacts_location(t *testing.T) { }), ), }, - { - ResourceName: resourceName, - ImportState: true, - ImportStateVerify: true, - }, { Config: testAccProjectConfig_secondaryArtifactsLocation(rName1, rName2), Check: resource.ComposeTestCheckFunc( @@ -2638,11 +2613,6 @@ func TestAccCodeBuildProject_SecondaryArtifacts_name(t *testing.T) { }), ), }, - { - ResourceName: resourceName, - ImportState: true, - ImportStateVerify: true, - }, { Config: testAccProjectConfig_secondaryArtifactsName(rName, name2), Check: resource.ComposeTestCheckFunc( @@ -2683,11 +2653,6 @@ func TestAccCodeBuildProject_SecondaryArtifacts_namespaceType(t *testing.T) { }), ), }, - { - ResourceName: resourceName, - ImportState: true, - ImportStateVerify: true, - }, { Config: testAccProjectConfig_secondaryArtifactsNamespaceType(rName, string(types.ArtifactNamespaceNone)), Check: resource.ComposeTestCheckFunc( @@ -2731,11 +2696,6 @@ func TestAccCodeBuildProject_SecondaryArtifacts_path(t *testing.T) { }), ), }, - { - ResourceName: resourceName, - ImportState: true, - ImportStateVerify: true, - }, { Config: testAccProjectConfig_secondaryArtifactsPath(rName, path2), Check: resource.ComposeTestCheckFunc( @@ -2776,11 +2736,6 @@ func TestAccCodeBuildProject_SecondaryArtifacts_packaging(t *testing.T) { }), ), }, - { - ResourceName: resourceName, - ImportState: true, - ImportStateVerify: true, - }, { Config: testAccProjectConfig_secondaryArtifactsPackaging(rName, string(types.ArtifactPackagingNone)), Check: resource.ComposeTestCheckFunc( From c6ea4a8ff0d0d6bb4d75cae3ec8736d1177419a8 Mon Sep 17 00:00:00 2001 From: Kit Ewbank Date: Tue, 23 Jan 2024 12:30:38 -0500 Subject: [PATCH 27/30] r/aws_codebuild_project: Fix typos. 
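Besides correcting the misspelled 'environvpc_configment' lookup back to 'vpc_config' in the create and update paths, this tidies the log flatteners: flattenLogsConfig now always emits both nested keys and leaves the nil handling to the per-block flatteners, which supply the DISABLED default. A minimal sketch of that shape, mirroring the change below (helper names are the existing ones in this file):

    // flattenLogsConfig always populates both keys; flattenCloudWatchLogs and
    // flattenS3Logs fall back to a single block with status DISABLED when the
    // corresponding API object is nil.
    tfMap := map[string]interface{}{
        "cloudwatch_logs": flattenCloudWatchLogs(apiObject.CloudWatchLogs),
        "s3_logs":         flattenS3Logs(apiObject.S3Logs),
    }
    return []interface{}{tfMap}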
--- internal/service/codebuild/project.go | 23 +++++++++-------------- 1 file changed, 9 insertions(+), 14 deletions(-) diff --git a/internal/service/codebuild/project.go b/internal/service/codebuild/project.go index 6d460fe011b..580a395f868 100644 --- a/internal/service/codebuild/project.go +++ b/internal/service/codebuild/project.go @@ -807,7 +807,7 @@ func resourceProjectCreate(ctx context.Context, d *schema.ResourceData, meta int input.TimeoutInMinutes = aws.Int32(int32(v.(int))) } - if v, ok := d.GetOk("environvpc_configment"); ok && len(v.([]interface{})) > 0 && v.([]interface{})[0] != nil { + if v, ok := d.GetOk("vpc_config"); ok && len(v.([]interface{})) > 0 && v.([]interface{})[0] != nil { input.VpcConfig = expandVPCConfig(v.([]interface{})[0].(map[string]interface{})) } @@ -1058,7 +1058,7 @@ func resourceProjectUpdate(ctx context.Context, d *schema.ResourceData, meta int } if d.HasChange("vpc_config") { - if v, ok := d.GetOk("environvpc_configment"); ok && len(v.([]interface{})) > 0 && v.([]interface{})[0] != nil { + if v, ok := d.GetOk("vpc_config"); ok && len(v.([]interface{})) > 0 && v.([]interface{})[0] != nil { input.VpcConfig = expandVPCConfig(v.([]interface{})[0].(map[string]interface{})) } } @@ -1652,14 +1652,9 @@ func flattenLogsConfig(apiObject *types.LogsConfig) []interface{} { return []interface{}{} } - tfMap := map[string]interface{}{} - - if v := apiObject.CloudWatchLogs; v != nil { - tfMap["cloudwatch_logs"] = flattenCloudWatchLogs(v) - } - - if v := apiObject.S3Logs; v != nil { - tfMap["s3_logs"] = flattenS3Logs(v) + tfMap := map[string]interface{}{ + "cloudwatch_logs": flattenCloudWatchLogs(apiObject.CloudWatchLogs), + "s3_logs": flattenS3Logs(apiObject.S3Logs), } return []interface{}{tfMap} @@ -1671,8 +1666,8 @@ func flattenCloudWatchLogs(apiObject *types.CloudWatchLogsConfig) []interface{} if apiObject == nil { tfMap["status"] = types.LogsConfigStatusTypeDisabled } else { - tfMap["status"] = apiObject.Status tfMap["group_name"] = aws.ToString(apiObject.GroupName) + tfMap["status"] = apiObject.Status tfMap["stream_name"] = aws.ToString(apiObject.StreamName) } @@ -1685,10 +1680,10 @@ func flattenS3Logs(apiObject *types.S3LogsConfig) []interface{} { if apiObject == nil { tfMap["status"] = types.LogsConfigStatusTypeDisabled } else { - tfMap["status"] = apiObject.Status - tfMap["location"] = aws.ToString(apiObject.Location) - tfMap["encryption_disabled"] = aws.ToBool(apiObject.EncryptionDisabled) tfMap["bucket_owner_access"] = apiObject.BucketOwnerAccess + tfMap["encryption_disabled"] = aws.ToBool(apiObject.EncryptionDisabled) + tfMap["location"] = aws.ToString(apiObject.Location) + tfMap["status"] = apiObject.Status } return []interface{}{tfMap} From 0fdc0eb0780def07effd666e1452092bdd9458d9 Mon Sep 17 00:00:00 2001 From: Kit Ewbank Date: Tue, 23 Jan 2024 13:48:03 -0500 Subject: [PATCH 28/30] r/aws_codebuild_project: Fix typos. 
--- internal/service/codebuild/project_test.go | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/internal/service/codebuild/project_test.go b/internal/service/codebuild/project_test.go index 709739ec414..5534975ee84 100644 --- a/internal/service/codebuild/project_test.go +++ b/internal/service/codebuild/project_test.go @@ -99,11 +99,12 @@ func TestAccCodeBuildProject_basic(t *testing.T) { Steps: []resource.TestStep{ { Config: testAccProjectConfig_basic(rName), - Check: resource.ComposeTestCheckFunc( + Check: resource.ComposeAggregateTestCheckFunc( testAccCheckProjectExists(ctx, resourceName, &project), acctest.CheckResourceAttrRegionalARN(resourceName, "arn", "codebuild", fmt.Sprintf("project/%s", rName)), resource.TestCheckResourceAttr(resourceName, "artifacts.#", "1"), resource.TestCheckResourceAttr(resourceName, "badge_enabled", "false"), + resource.TestCheckResourceAttr(resourceName, "build_batch_config.#", "0"), resource.TestCheckResourceAttr(resourceName, "build_timeout", "60"), resource.TestCheckResourceAttr(resourceName, "queued_timeout", "480"), resource.TestCheckResourceAttr(resourceName, "cache.#", "1"), @@ -120,19 +121,18 @@ func TestAccCodeBuildProject_basic(t *testing.T) { resource.TestCheckResourceAttr(resourceName, "logs_config.0.cloudwatch_logs.0.status", string(types.LogsConfigStatusTypeEnabled)), resource.TestCheckResourceAttr(resourceName, "logs_config.0.s3_logs.0.status", string(types.LogsConfigStatusTypeDisabled)), resource.TestCheckResourceAttr(resourceName, "project_visibility", "PRIVATE"), + resource.TestCheckResourceAttr(resourceName, "secondary_artifacts.#", "0"), + resource.TestCheckResourceAttr(resourceName, "secondary_sources.#", "0"), + resource.TestCheckResourceAttr(resourceName, "secondary_source_version.#", "0"), resource.TestCheckResourceAttrPair(resourceName, "service_role", roleResourceName, "arn"), resource.TestCheckResourceAttr(resourceName, "source.#", "1"), resource.TestCheckResourceAttr(resourceName, "source.0.git_clone_depth", "0"), resource.TestCheckResourceAttr(resourceName, "source.0.insecure_ssl", "false"), - resource.TestCheckResourceAttr(resourceName, "source.0.location", "https://github.com/hashibot-test/aws-test.git"), + resource.TestCheckResourceAttr(resourceName, "source.0.location", testAccGitHubSourceLocationFromEnv()), resource.TestCheckResourceAttr(resourceName, "source.0.report_build_status", "false"), resource.TestCheckResourceAttr(resourceName, "source.0.type", "GITHUB"), - resource.TestCheckResourceAttr(resourceName, "secondary_source_version.#", "0"), - resource.TestCheckResourceAttr(resourceName, "build_batch_config.#", "0"), - resource.TestCheckResourceAttr(resourceName, "secondary_artifacts.#", "0"), - resource.TestCheckResourceAttr(resourceName, "secondary_sources.#", "0"), - resource.TestCheckResourceAttr(resourceName, "vpc_config.#", "0"), resource.TestCheckResourceAttr(resourceName, "tags.%", "0"), + resource.TestCheckResourceAttr(resourceName, "vpc_config.#", "0"), ), }, { @@ -4618,7 +4618,7 @@ resource "aws_codebuild_project" "test" { vpc_config { security_group_ids = [aws_security_group.test.id] - subnets = aws_subnet.test[0].id + subnets = [aws_subnet.test[0].id] vpc_id = aws_vpc.test.id } } From 4eed2ab3ebfa7776246ad67e9cf3579fc179197a Mon Sep 17 00:00:00 2001 From: Kit Ewbank Date: Tue, 23 Jan 2024 13:49:02 -0500 Subject: [PATCH 29/30] r/aws_codebuild_project: Correct 'vpc_config' and 'logs_config' flex. 
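Two related corrections here: expandProjectLogsConfig now takes the raw d.Get("logs_config") value, so an absent or empty block still produces a LogsConfig with the disabled defaults applied, and removing the vpc_config block now actually detaches the project from the VPC by sending an explicit empty struct. A minimal sketch of the update-path handling, mirroring the diff below:

    // A removed vpc_config block must still be sent to UpdateProject as an
    // empty VpcConfig; omitting the field would leave the existing
    // association in place.
    if d.HasChange("vpc_config") {
        if v, ok := d.GetOk("vpc_config"); ok && len(v.([]interface{})) > 0 && v.([]interface{})[0] != nil {
            input.VpcConfig = expandVPCConfig(v.([]interface{})[0].(map[string]interface{}))
        } else {
            input.VpcConfig = &types.VpcConfig{}
        }
    }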
--- internal/service/codebuild/project.go | 39 +++++++++++++-------------- 1 file changed, 18 insertions(+), 21 deletions(-) diff --git a/internal/service/codebuild/project.go b/internal/service/codebuild/project.go index 580a395f868..c843c8c4fde 100644 --- a/internal/service/codebuild/project.go +++ b/internal/service/codebuild/project.go @@ -734,9 +734,10 @@ func resourceProjectCreate(ctx context.Context, d *schema.ResourceData, meta int name := d.Get("name").(string) input := &codebuild.CreateProjectInput{ - Name: aws.String(name), - Source: projectSource, - Tags: getTagsIn(ctx), + LogsConfig: expandProjectLogsConfig(d.Get("logs_config")), + Name: aws.String(name), + Source: projectSource, + Tags: getTagsIn(ctx), } if v, ok := d.GetOk("artifacts"); ok && len(v.([]interface{})) > 0 && v.([]interface{})[0] != nil { @@ -775,10 +776,6 @@ func resourceProjectCreate(ctx context.Context, d *schema.ResourceData, meta int input.FileSystemLocations = expandProjectFileSystemLocations(v.(*schema.Set).List()) } - if v, ok := d.GetOk("logs_config"); ok && len(v.([]interface{})) > 0 && v.([]interface{})[0] != nil { - input.LogsConfig = expandProjectLogsConfig(v.([]interface{})[0].(map[string]interface{})) - } - if v, ok := d.GetOk("queued_timeout"); ok { input.QueuedTimeoutInMinutes = aws.Int32(int32(v.(int))) } @@ -1006,9 +1003,7 @@ func resourceProjectUpdate(ctx context.Context, d *schema.ResourceData, meta int } if d.HasChange("logs_config") { - if v, ok := d.GetOk("logs_config"); ok && len(v.([]interface{})) > 0 && v.([]interface{})[0] != nil { - input.LogsConfig = expandProjectLogsConfig(v.([]interface{})[0].(map[string]interface{})) - } + input.LogsConfig = expandProjectLogsConfig(d.Get("logs_config")) } if d.HasChange("queued_timeout") { @@ -1060,6 +1055,8 @@ func resourceProjectUpdate(ctx context.Context, d *schema.ResourceData, meta int if d.HasChange("vpc_config") { if v, ok := d.GetOk("vpc_config"); ok && len(v.([]interface{})) > 0 && v.([]interface{})[0] != nil { input.VpcConfig = expandVPCConfig(v.([]interface{})[0].(map[string]interface{})) + } else { + input.VpcConfig = &types.VpcConfig{} } } @@ -1394,19 +1391,19 @@ func expandProjectEnvironment(tfMap map[string]interface{}) *types.ProjectEnviro return apiObject } -func expandProjectLogsConfig(tfMap map[string]interface{}) *types.LogsConfig { - if tfMap == nil { - return nil - } - +func expandProjectLogsConfig(v interface{}) *types.LogsConfig { apiObject := &types.LogsConfig{} - if v, ok := tfMap["cloudwatch_logs"].([]interface{}); ok && len(v) > 0 && v[0] != nil { - apiObject.CloudWatchLogs = expandCloudWatchLogsConfig(v[0].(map[string]interface{})) - } + if v, ok := v.([]interface{}); ok && len(v) > 0 && v[0] != nil { + if tfMap := v[0].(map[string]interface{}); tfMap != nil { + if v, ok := tfMap["cloudwatch_logs"].([]interface{}); ok && len(v) > 0 && v[0] != nil { + apiObject.CloudWatchLogs = expandCloudWatchLogsConfig(v[0].(map[string]interface{})) + } - if v, ok := tfMap["s3_logs"].([]interface{}); ok && len(v) > 0 && v[0] != nil { - apiObject.S3Logs = expandS3LogsConfig(v[0].(map[string]interface{})) + if v, ok := tfMap["s3_logs"].([]interface{}); ok && len(v) > 0 && v[0] != nil { + apiObject.S3Logs = expandS3LogsConfig(v[0].(map[string]interface{})) + } + } } if apiObject.CloudWatchLogs == nil { @@ -1625,7 +1622,7 @@ func flattenProjectFileSystemLocations(apiObjects []types.ProjectFileSystemLocat func flattenProjectFileSystemLocation(apiObject types.ProjectFileSystemLocation) map[string]interface{} { tfMap := 
map[string]interface{}{ - "types": apiObject.Type, + "type": apiObject.Type, } if v := apiObject.Identifier; v != nil { From 99bfcb33c3c6ed7e4d62f46f5520e6397b1f8feb Mon Sep 17 00:00:00 2001 From: Kit Ewbank Date: Tue, 23 Jan 2024 13:52:59 -0500 Subject: [PATCH 30/30] Fix terrafmt errors. --- internal/service/codebuild/project_test.go | 44 +++++++++++----------- 1 file changed, 22 insertions(+), 22 deletions(-) diff --git a/internal/service/codebuild/project_test.go b/internal/service/codebuild/project_test.go index 5534975ee84..5720f75cbca 100644 --- a/internal/service/codebuild/project_test.go +++ b/internal/service/codebuild/project_test.go @@ -3886,23 +3886,23 @@ resource "aws_codebuild_project" "test" { func testAccProjectConfig_buildBatchConfigDelete(rName string, withBuildBatchConfig bool) string { template := ` resource "aws_codebuild_project" "test" { - name = %[1]q - service_role = aws_iam_role.test.arn + name = %[1]q + service_role = aws_iam_role.test.arn - artifacts { - type = "NO_ARTIFACTS" - } + artifacts { + type = "NO_ARTIFACTS" + } - environment { - compute_type = "BUILD_GENERAL1_SMALL" - image = "2" - type = "LINUX_CONTAINER" - } + environment { + compute_type = "BUILD_GENERAL1_SMALL" + image = "2" + type = "LINUX_CONTAINER" + } - source { - location = "https://github.com/hashicorp/packer.git" - type = "GITHUB" - } + source { + location = "https://github.com/hashicorp/packer.git" + type = "GITHUB" + } %[2]s } @@ -3910,17 +3910,17 @@ resource "aws_codebuild_project" "test" { buildBatchConfig := ` build_batch_config { - combine_artifacts = true + combine_artifacts = true - restrictions { - compute_types_allowed = [] - maximum_builds_allowed = 10 - } + restrictions { + compute_types_allowed = [] + maximum_builds_allowed = 10 + } - service_role = aws_iam_role.test.arn - timeout_in_mins = 480 + service_role = aws_iam_role.test.arn + timeout_in_mins = 480 } - ` +` if withBuildBatchConfig { return acctest.ConfigCompose(testAccProjectConfig_baseServiceRole(rName), fmt.Sprintf(template, rName, buildBatchConfig))